From 4ed91ce7ed6d7646cf2a673b5b4aeec482d746c7 Mon Sep 17 00:00:00 2001 From: Andreas Franek Date: Sun, 7 Mar 2021 20:59:54 +0100 Subject: [PATCH] add estimateAffine3D overload that implements Umeyama's algorithm --- doc/opencv.bib | 10 +++ modules/calib3d/include/opencv2/calib3d.hpp | 27 +++++++ modules/calib3d/src/ptsetreg.cpp | 80 +++++++++++++++++++ .../calib3d/test/test_affine3d_estimator.cpp | 21 +++++ 4 files changed, 138 insertions(+) diff --git a/doc/opencv.bib b/doc/opencv.bib index d44b0f5293..526e3d682c 100644 --- a/doc/opencv.bib +++ b/doc/opencv.bib @@ -1324,3 +1324,13 @@ pages={5551--5560}, year={2017} } +@article{umeyama1991least, + title={Least-squares estimation of transformation parameters between two point patterns}, + author={Umeyama, Shinji}, + journal={IEEE Transactions on Pattern Analysis and Machine Intelligence}, + volume={13}, + number={04}, + pages={376--380}, + year={1991}, + publisher={IEEE Computer Society} +} diff --git a/modules/calib3d/include/opencv2/calib3d.hpp b/modules/calib3d/include/opencv2/calib3d.hpp index efbbbbc014..623c296413 100644 --- a/modules/calib3d/include/opencv2/calib3d.hpp +++ b/modules/calib3d/include/opencv2/calib3d.hpp @@ -3173,6 +3173,33 @@ CV_EXPORTS_W int estimateAffine3D(InputArray src, InputArray dst, OutputArray out, OutputArray inliers, double ransacThreshold = 3, double confidence = 0.99); +/** @brief Computes an optimal affine transformation between two 3D point sets. + +It computes \f$R,c,t\f$ minimizing \f$\sum_{i} \lVert dst_i - (c \cdot R \cdot src_i + t) \rVert^{2}\f$ +where \f$R\f$ is a 3x3 rotation matrix, \f$t\f$ is a 3x1 translation vector and \f$c\f$ is a +scalar size value. This is an implementation of the algorithm by Umeyama \cite umeyama1991least . +The estimated affine transform has a homogeneous scale which is a subclass of affine +transformations with 7 degrees of freedom. The paired point sets need to comprise at least 3 +points each. + +@param src First input 3D point set. +@param dst Second input 3D point set. 
+@param scale If null is passed, the scale parameter c will be assumed to be 1.0. +Else the pointed-to variable will be set to the optimal scale. +@param force_rotation If true, the returned rotation will never be a reflection. +This might be unwanted, e.g. when optimizing a transform between a right- and a +left-handed coordinate system. +@return 3D affine transformation matrix \f$3 \times 4\f$ of the form +\f[T = +\begin{bmatrix} +R & t\\ +\end{bmatrix} +\f] + + */ +CV_EXPORTS_W cv::Mat estimateAffine3D(InputArray src, InputArray dst, + CV_OUT double* scale = nullptr, bool force_rotation = true); + /** @brief Computes an optimal translation between two 3D point sets. * * It computes diff --git a/modules/calib3d/src/ptsetreg.cpp b/modules/calib3d/src/ptsetreg.cpp index 6bd3b16c32..5c91fff037 100644 --- a/modules/calib3d/src/ptsetreg.cpp +++ b/modules/calib3d/src/ptsetreg.cpp @@ -900,6 +900,86 @@ int estimateAffine3D(InputArray _from, InputArray _to, return createRANSACPointSetRegistrator(makePtr<Affine3DEstimatorCallback>(), 4, ransacThreshold, confidence)->run(dFrom, dTo, _out, _inliers); } +Mat estimateAffine3D(InputArray _from, InputArray _to, + CV_OUT double* _scale, bool force_rotation) +{ + CV_INSTRUMENT_REGION(); + Mat from = _from.getMat(), to = _to.getMat(); + int count = from.checkVector(3); + + CV_CheckGE(count, 3, "Umeyama algorithm needs at least 3 points for affine transformation estimation."); + CV_CheckEQ(to.checkVector(3), count, "Point sets need to have the same size"); + from = from.reshape(1, count); + to = to.reshape(1, count); + if(from.type() != CV_64F) + from.convertTo(from, CV_64F); + if(to.type() != CV_64F) + to.convertTo(to, CV_64F); + + const double one_over_n = 1./count; + + const auto colwise_mean = [one_over_n](const Mat& m) + { + Mat my; + reduce(m, my, 0, REDUCE_SUM, CV_64F); + return my * one_over_n; + }; + + const auto demean = [count](const Mat& A, const Mat& mean) + { + Mat A_centered = Mat::zeros(count, 3, CV_64F); + for(int i = 0; i < count; i++) + { + 
A_centered.row(i) = A.row(i) - mean; + } + return A_centered; + }; + + Mat from_mean = colwise_mean(from); + Mat to_mean = colwise_mean(to); + + Mat from_centered = demean(from, from_mean); + Mat to_centered = demean(to, to_mean); + + Mat cov = to_centered.t() * from_centered * one_over_n; + + Mat u,d,vt; + SVD::compute(cov, d, u, vt, SVD::MODIFY_A | SVD::FULL_UV); + + CV_CheckGE(countNonZero(d), 2, "Points cannot be colinear"); + + Mat S = Mat::eye(3, 3, CV_64F); + // det(d) can only ever be >=0, so we can always use this here (compared to the original formula by Umeyama) + if (force_rotation && (determinant(u) * determinant(vt) < 0)) + { + S.at<double>(2, 2) = -1; + } + Mat rmat = u*S*vt; + + double scale = 1.0; + if (_scale) + { + double var_from = 0.; + scale = 0.; + for(int i = 0; i < 3; i++) + { + var_from += norm(from_centered.col(i), NORM_L2SQR); + scale += d.at<double>(i, 0) * S.at<double>(i, i); + } + double inverse_var = count / var_from; + scale *= inverse_var; + *_scale = scale; + } + Mat new_to = scale * rmat * from_mean.t(); + + Mat transform; + transform.create(3, 4, CV_64F); + Mat r_part(transform(Rect(0, 0, 3, 3))); + rmat.copyTo(r_part); + transform.col(3) = to_mean.t() - new_to; + return transform; +} + int estimateTranslation3D(InputArray _from, InputArray _to, OutputArray _out, OutputArray _inliers, double ransacThreshold, double confidence) diff --git a/modules/calib3d/test/test_affine3d_estimator.cpp b/modules/calib3d/test/test_affine3d_estimator.cpp index 521b01ac08..3f1b50e5f2 100644 --- a/modules/calib3d/test/test_affine3d_estimator.cpp +++ b/modules/calib3d/test/test_affine3d_estimator.cpp @@ -201,4 +201,25 @@ TEST(Calib3d_EstimateAffine3D, regression_16007) EXPECT_EQ(1, res); } +TEST(Calib3d_EstimateAffine3D, umeyama_3_pt) +{ + std::vector<cv::Vec3d> points = {{{0.80549149, 0.8225781, 0.79949521}, + {0.28906756, 0.57158557, 0.9864789}, + {0.58266182, 0.65474983, 0.25078834}}}; + cv::Mat R = (cv::Mat_<double>(3,3) << 0.9689135, -0.0232753, 0.2463025, + 0.0236362, 0.9997195, 
0.0014915, + -0.2462682, 0.0043765, 0.9691918); + cv::Vec3d t(1., 2., 3.); + cv::Affine3d transform(R, t); + std::vector<cv::Vec3d> transformed_points(points.size()); + std::transform(points.begin(), points.end(), transformed_points.begin(), [transform](const cv::Vec3d v){return transform * v;}); + double scale; + cv::Mat trafo_est = estimateAffine3D(points, transformed_points, &scale); + Mat R_est(trafo_est(Rect(0, 0, 3, 3))); + EXPECT_LE(cvtest::norm(R_est, R, NORM_INF), 1e-6); + Vec3d t_est = trafo_est.col(3); + EXPECT_LE(cvtest::norm(t_est, t, NORM_INF), 1e-6); + EXPECT_NEAR(scale, 1.0, 1e-6); +} + }} // namespace