Fix stitching Python bindings PR #22329

This commit is contained in:
Andrew Chinery 2022-09-13 14:35:42 +01:00
parent fc3e393516
commit 26a7647e0e
5 changed files with 89 additions and 10 deletions

View File

@ -138,7 +138,7 @@ public:
@sa detail::MatchesInfo
*/
CV_WRAP_AS(apply2) void operator ()(const std::vector<ImageFeatures> &features, CV_OUT std::vector<MatchesInfo> &pairwise_matches,
const cv::UMat &mask = cv::UMat());
const cv::UMat &mask = cv::UMat()) { match(features, pairwise_matches, mask); };
/** @return True, if it's possible to use the same matcher instance in parallel, false otherwise
*/
@ -161,6 +161,16 @@ protected:
virtual void match(const ImageFeatures &features1, const ImageFeatures &features2,
MatchesInfo& matches_info) = 0;
/** @brief This method implements logic to match features between arbitrary number of features.
By default this checks every pair of inputs in the input, but the behaviour can be changed by subclasses.
@param features vector of image features
@param pairwise_matches found matches
@param mask (optional) mask indicating which image pairs should be matched
*/
virtual void match(const std::vector<ImageFeatures> &features, std::vector<MatchesInfo> &pairwise_matches,
const cv::UMat &mask = cv::UMat());
bool is_thread_safe_;
};
@ -202,11 +212,12 @@ public:
CV_WRAP BestOf2NearestRangeMatcher(int range_width = 5, bool try_use_gpu = false, float match_conf = 0.3f,
int num_matches_thresh1 = 6, int num_matches_thresh2 = 6);
void operator ()(const std::vector<ImageFeatures> &features, std::vector<MatchesInfo> &pairwise_matches,
const cv::UMat &mask = cv::UMat());
protected:
// indicate that we do not want to hide the base class match method with a different signature
using BestOf2NearestMatcher::match;
void match(const std::vector<ImageFeatures> &features, std::vector<MatchesInfo> &pairwise_matches,
const cv::UMat &mask = cv::UMat()) CV_OVERRIDE;
int range_width_;
};

View File

@ -248,7 +248,7 @@ public:
~GraphCutSeamFinder();
CV_WRAP void find(const std::vector<UMat> &src, const std::vector<Point> &corners,
std::vector<UMat> &masks) CV_OVERRIDE;
CV_IN_OUT std::vector<UMat> &masks) CV_OVERRIDE;
private:
// To avoid GCGraph dependency

View File

@ -1,5 +1,6 @@
#!/usr/bin/env python
import cv2 as cv
import numpy as np
from tests_common import NewOpenCVTests
@ -134,6 +135,47 @@ class stitching_matches_info_test(NewOpenCVTests):
self.assertIsNotNone(matches_info.matches)
self.assertIsNotNone(matches_info.inliers_mask)
class stitching_range_matcher_test(NewOpenCVTests):
    """Checks that BestOf2NearestRangeMatcher only matches image pairs within range_width."""

    def test_simple(self):
        # Three consecutive, overlapping panorama fragments from the test data.
        sample_names = ['stitching/a1.png', 'stitching/a2.png', 'stitching/a3.png']
        images = [self.get_sample(name) for name in sample_names]

        # Detect ORB features in every input image.
        orb = cv.ORB_create()
        features = [cv.detail.computeImageFeatures2(orb, img) for img in images]

        # range_width=1 restricts matching to adjacent images only.
        matcher = cv.detail_BestOf2NearestRangeMatcher(range_width=1)
        matches = matcher.apply2(features)

        # matches[1] is image 0 and image 1, should have non-zero confidence
        self.assertNotEqual(matches[1].confidence, 0)
        # matches[2] is image 0 and image 2, should have zero confidence due to range_width=1
        self.assertEqual(matches[2].confidence, 0)
class stitching_seam_finder_graph_cuts(NewOpenCVTests):
    """Smoke test for the GraphCutSeamFinder Python binding (CV_IN_OUT masks)."""

    def test_simple(self):
        sample_names = ['stitching/a1.png', 'stitching/a2.png', 'stitching/a3.png']
        loaded = [self.get_sample(name) for name in sample_names]
        # Shrink the inputs so the graph-cut optimisation stays fast.
        images = [cv.resize(img, [100, 100]) for img in loaded]

        finder = cv.detail_GraphCutSeamFinder('COST_COLOR_GRAD')

        # Start from fully-opaque masks; find() refines them in place and
        # also returns them thanks to the CV_IN_OUT annotation.
        masks = [cv.UMat(255 * np.ones((img.shape[0], img.shape[1]), np.uint8)) for img in images]
        images_f = [img.astype(np.float32) for img in images]

        masks_warped = finder.find(images_f, [(0, 0), (75, 0), (150, 0)], masks)
        self.assertIsNotNone(masks_warped)
if __name__ == '__main__':
    # Run the tests via the OpenCV test-suite bootstrap when invoked directly.
    NewOpenCVTests.bootstrap()

View File

@ -335,8 +335,8 @@ MatchesInfo& MatchesInfo::operator =(const MatchesInfo &other)
//////////////////////////////////////////////////////////////////////////////
void FeaturesMatcher::operator ()(const std::vector<ImageFeatures> &features, std::vector<MatchesInfo> &pairwise_matches,
const UMat &mask)
void FeaturesMatcher::match(const std::vector<ImageFeatures> &features, std::vector<MatchesInfo> &pairwise_matches,
const UMat &mask)
{
const int num_images = static_cast<int>(features.size());
@ -484,8 +484,8 @@ BestOf2NearestRangeMatcher::BestOf2NearestRangeMatcher(int range_width, bool try
}
void BestOf2NearestRangeMatcher::operator ()(const std::vector<ImageFeatures> &features, std::vector<MatchesInfo> &pairwise_matches,
const UMat &mask)
void BestOf2NearestRangeMatcher::match(const std::vector<ImageFeatures> &features, std::vector<MatchesInfo> &pairwise_matches,
const UMat &mask)
{
const int num_images = static_cast<int>(features.size());

View File

@ -114,4 +114,30 @@ TEST(ParallelFeaturesFinder, IsSameWithSerial)
}
}
TEST(RangeMatcher, MatchesRangeOnly)
{
    // Detect ORB features in three consecutive panorama fragments.
    const string data_path = string(cvtest::TS::ptr()->get_data_path());
    const char* sample_names[] = {"stitching/a1.png", "stitching/a2.png", "stitching/a3.png"};

    Ptr<Feature2D> finder = ORB::create();
    vector<detail::ImageFeatures> features(3);
    for (int i = 0; i < 3; ++i)
    {
        Mat img = imread(data_path + sample_names[i], IMREAD_GRAYSCALE);
        computeImageFeatures(finder, img, features[i]);
    }

    // range_width=1 restricts matching to adjacent image pairs only.
    Ptr<detail::FeaturesMatcher> matcher = makePtr<detail::BestOf2NearestRangeMatcher>(1);
    vector<detail::MatchesInfo> pairwise_matches;
    (*matcher)(features, pairwise_matches);

    // matches[1] will be image 0 and image 1, should have non-zero confidence
    EXPECT_NE(pairwise_matches[1].confidence, .0);
    // matches[2] will be image 0 and image 2, should have zero confidence due to range_width=1
    EXPECT_DOUBLE_EQ(pairwise_matches[2].confidence, .0);
}
}} // namespace