Merge pull request #17618 from Yosshi999:gsoc_sift-better-test
Added/Fixed testcases for SIFT

* merge perf_sift into conventional perf tests
* Fix disabled SIFT scale invariance tests (allows trainIdx duplication in matching scaled keypoints)
commit 4064d4c7eb (parent 6259ba1bfd)
@@ -261,6 +261,10 @@ public:
 @param contrastThreshold The contrast threshold used to filter out weak features in semi-uniform
 (low-contrast) regions. The larger the threshold, the less features are produced by the detector.
 
+@note The contrast threshold will be divided by nOctaveLayers when the filtering is applied. When
+nOctaveLayers is set to default and if you want to use the value used in D. Lowe paper, 0.03, set
+this argument to 0.09.
+
 @param edgeThreshold The threshold used to filter out edge-like features. Note that its meaning
 is different from the contrastThreshold, i.e. the larger the edgeThreshold, the less features are
 filtered out (more features are retained).
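The new note amounts to a concrete recipe: with the default nOctaveLayers = 3, passing contrastThreshold = 0.09 gives the effective per-layer threshold 0.09 / 3 = 0.03 used in D. Lowe's paper. A minimal standalone sketch, not part of this patch, with values taken from the documentation above:

    #include <opencv2/features2d.hpp>

    int main()
    {
        // The filter effectively uses contrastThreshold / nOctaveLayers,
        // so 0.09 / 3 = 0.03 reproduces Lowe's published threshold.
        cv::Ptr<cv::SIFT> sift = cv::SIFT::create(/*nfeatures=*/0,
                                                  /*nOctaveLayers=*/3,
                                                  /*contrastThreshold=*/0.09,
                                                  /*edgeThreshold=*/10,
                                                  /*sigma=*/1.6);
        return 0;
    }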
@@ -271,6 +275,8 @@ public:
     CV_WRAP static Ptr<SIFT> create(int nfeatures = 0, int nOctaveLayers = 3,
         double contrastThreshold = 0.04, double edgeThreshold = 10,
         double sigma = 1.6);
+
+    CV_WRAP virtual String getDefaultName() const CV_OVERRIDE;
 };
 
 typedef SIFT SiftFeatureDetector;
@@ -21,7 +21,8 @@ namespace opencv_test
     ORB_DEFAULT, ORB_1500_13_1, \
     AKAZE_DEFAULT, AKAZE_DESCRIPTOR_KAZE, \
     BRISK_DEFAULT, \
-    KAZE_DEFAULT
+    KAZE_DEFAULT, \
+    SIFT_DEFAULT
 
 #define CV_ENUM_EXPAND(name, ...) CV_ENUM(name, __VA_ARGS__)
 
@@ -77,6 +78,8 @@ static inline Ptr<Feature2D> getFeature2D(Feature2DType type)
         return KAZE::create();
     case MSER_DEFAULT:
         return MSER::create();
+    case SIFT_DEFAULT:
+        return SIFT::create();
     default:
         return Ptr<Feature2D>();
     }
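For reference, SIFT_DEFAULT maps to SIFT::create() in the factory above, so outside the perf harness the equivalent construction is plain SIFT::create(). A short standalone sketch, not part of this patch; the image path is only an example:

    #include <vector>
    #include <opencv2/features2d.hpp>
    #include <opencv2/imgcodecs.hpp>

    int main()
    {
        // Same object the perf harness now obtains via getFeature2D(SIFT_DEFAULT).
        cv::Ptr<cv::Feature2D> f2d = cv::SIFT::create();

        cv::Mat frame = cv::imread("stitching/a3.png", cv::IMREAD_GRAYSCALE);
        if (frame.empty())
            return 1;                       // example image not found

        std::vector<cv::KeyPoint> keypoints;
        cv::Mat descriptors;
        f2d->detectAndCompute(frame, cv::noArray(), keypoints, descriptors);
        return 0;
    }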
@@ -1,85 +0,0 @@
-// This file is part of OpenCV project.
-// It is subject to the license terms in the LICENSE file found in the top-level directory
-// of this distribution and at http://opencv.org/license.html.
-#include "perf_precomp.hpp"
-
-namespace opencv_test { namespace {
-
-typedef perf::TestBaseWithParam<std::string> SIFT_detect;
-typedef perf::TestBaseWithParam<std::string> SIFT_extract;
-typedef perf::TestBaseWithParam<std::string> SIFT_full;
-
-#define SIFT_IMAGES \
-    "cv/detectors_descriptors_evaluation/images_datasets/leuven/img1.png",\
-    "stitching/a3.png"
-
-PERF_TEST_P_(SIFT_detect, SIFT)
-{
-    string filename = getDataPath(GetParam());
-    Mat frame = imread(filename, IMREAD_GRAYSCALE);
-    ASSERT_FALSE(frame.empty()) << "Unable to load source image " << filename;
-
-    Mat mask;
-    declare.in(frame).time(90);
-    Ptr<SIFT> detector = SIFT::create();
-    vector<KeyPoint> points;
-
-    PERF_SAMPLE_BEGIN();
-        detector->detect(frame, points, mask);
-    PERF_SAMPLE_END();
-
-    SANITY_CHECK_NOTHING();
-}
-
-PERF_TEST_P_(SIFT_extract, SIFT)
-{
-    string filename = getDataPath(GetParam());
-    Mat frame = imread(filename, IMREAD_GRAYSCALE);
-    ASSERT_FALSE(frame.empty()) << "Unable to load source image " << filename;
-
-    Mat mask;
-    declare.in(frame).time(90);
-
-    Ptr<SIFT> detector = SIFT::create();
-    vector<KeyPoint> points;
-    Mat descriptors;
-    detector->detect(frame, points, mask);
-
-    PERF_SAMPLE_BEGIN();
-        detector->compute(frame, points, descriptors);
-    PERF_SAMPLE_END();
-
-    SANITY_CHECK_NOTHING();
-}
-
-PERF_TEST_P_(SIFT_full, SIFT)
-{
-    string filename = getDataPath(GetParam());
-    Mat frame = imread(filename, IMREAD_GRAYSCALE);
-    ASSERT_FALSE(frame.empty()) << "Unable to load source image " << filename;
-
-    Mat mask;
-    declare.in(frame).time(90);
-    Ptr<SIFT> detector = SIFT::create();
-    vector<KeyPoint> points;
-    Mat descriptors;
-
-    PERF_SAMPLE_BEGIN();
-        detector->detectAndCompute(frame, mask, points, descriptors, false);
-    PERF_SAMPLE_END();
-
-    SANITY_CHECK_NOTHING();
-}
-
-
-INSTANTIATE_TEST_CASE_P(/*nothing*/, SIFT_detect,
-    testing::Values(SIFT_IMAGES)
-);
-INSTANTIATE_TEST_CASE_P(/*nothing*/, SIFT_extract,
-    testing::Values(SIFT_IMAGES)
-);
-INSTANTIATE_TEST_CASE_P(/*nothing*/, SIFT_full,
-    testing::Values(SIFT_IMAGES)
-);
-
-}} // namespace
@@ -126,6 +126,11 @@ Ptr<SIFT> SIFT::create( int _nfeatures, int _nOctaveLayers,
     return makePtr<SIFT_Impl>(_nfeatures, _nOctaveLayers, _contrastThreshold, _edgeThreshold, _sigma);
 }
 
+String SIFT::getDefaultName() const
+{
+    return (Feature2D::getDefaultName() + ".SIFT");
+}
+
 static inline void
 unpackOctave(const KeyPoint& kpt, int& octave, int& layer, float& scale)
 {
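The override makes a SIFT instance identifiable by name at runtime, which the invariance tests below rely on. A minimal sketch of the expected behaviour with this change applied, not part of the patch:

    #include <opencv2/features2d.hpp>

    int main()
    {
        cv::Ptr<cv::SIFT> sift = cv::SIFT::create();
        // Feature2D::getDefaultName() returns "Feature2D", so the override yields:
        CV_Assert(sift->getDefaultName() == "Feature2D.SIFT");
        return 0;
    }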
@@ -15,6 +15,26 @@ const static std::string IMAGE_TSUKUBA = "features2d/tsukuba.png";
 const static std::string IMAGE_BIKES = "detectors_descriptors_evaluation/images_datasets/bikes/img1.png";
 #define Value(...) Values(String_FeatureDetector_DescriptorExtractor_Float_t(__VA_ARGS__))
 
+static
+void SetSuitableSIFTOctave(vector<KeyPoint>& keypoints,
+                           int firstOctave = -1, int nOctaveLayers = 3, double sigma = 1.6)
+{
+    for (size_t i = 0; i < keypoints.size(); i++ )
+    {
+        int octv, layer;
+        KeyPoint& kpt = keypoints[i];
+        double octv_layer = std::log(kpt.size / sigma) / std::log(2.) - 1;
+        octv = cvFloor(octv_layer);
+        layer = cvRound( (octv_layer - octv) * nOctaveLayers );
+        if (octv < firstOctave)
+        {
+            octv = firstOctave;
+            layer = 0;
+        }
+        kpt.octave = (layer << 8) | (octv & 255);
+    }
+}
+
 static
 void rotateKeyPoints(const vector<KeyPoint>& src, const Mat& H, float angle, vector<KeyPoint>& dst)
 {
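To make the arithmetic concrete, here is a small worked sketch, not part of the patch, assuming OpenCV's SIFT convention that a keypoint at octave o, layer l has size of roughly 2 * sigma * 2^(o + l/nOctaveLayers):

    #include <cmath>
    #include <cstdio>
    #include <opencv2/core.hpp>   // cvFloor, cvRound

    int main()
    {
        const double sigma = 1.6;
        const int nOctaveLayers = 3;

        // A keypoint detected at octave 1, layer 2: size ~= 2 * 1.6 * 2^(1 + 2/3) ~= 10.2
        const double size = 2.0 * sigma * std::pow(2.0, 1.0 + 2.0 / 3.0);

        double octv_layer = std::log(size / sigma) / std::log(2.) - 1;  // ~1.667
        int octv  = cvFloor(octv_layer);                                // 1
        int layer = cvRound((octv_layer - octv) * nOctaveLayers);       // 2
        int packed = (layer << 8) | (octv & 255);                       // 0x0201

        std::printf("octave=%d layer=%d packed=0x%04x\n", octv, layer, packed);
        return 0;
    }

This is the situation the helper corrects: after scaleKeyPoints() changes kpt.size, the packed kpt.octave carried over from the original detection would otherwise point SIFT's compute() at a pyramid level that no longer corresponds to the keypoint size.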
@@ -132,6 +152,10 @@ TEST_P(DescriptorScaleInvariance, scale)
 
         vector<KeyPoint> keypoints1;
         scaleKeyPoints(keypoints0, keypoints1, 1.0f/scale);
+        if (featureDetector->getDefaultName() == "Feature2D.SIFT")
+        {
+            SetSuitableSIFTOctave(keypoints1);
+        }
         Mat descriptors1;
         descriptorExtractor->compute(image1, keypoints1, descriptors1);
 
@@ -186,9 +210,8 @@ INSTANTIATE_TEST_CASE_P(AKAZE_DESCRIPTOR_KAZE, DescriptorRotationInvariance,
  * Descriptor's scale invariance check
  */
 
-// TODO: Expected: (descInliersRatio) >= (minInliersRatio), actual: 0.330378 vs 0.78
-INSTANTIATE_TEST_CASE_P(DISABLED_SIFT, DescriptorScaleInvariance,
-                        Value(IMAGE_BIKES, SIFT::create(), SIFT::create(), 0.78f));
+INSTANTIATE_TEST_CASE_P(SIFT, DescriptorScaleInvariance,
+                        Value(IMAGE_BIKES, SIFT::create(0, 3, 0.09), SIFT::create(0, 3, 0.09), 0.78f));
 
 INSTANTIATE_TEST_CASE_P(AKAZE, DescriptorScaleInvariance,
                         Value(IMAGE_BIKES, AKAZE::create(), AKAZE::create(), 0.6f));
@@ -29,7 +29,6 @@ void matchKeyPoints(const vector<KeyPoint>& keypoints0, const Mat& H,
     perspectiveTransform(Mat(points0), points0t, H);
 
     matches.clear();
-    vector<uchar> usedMask(keypoints1.size(), 0);
     for(int i0 = 0; i0 < static_cast<int>(keypoints0.size()); i0++)
     {
         int nearestPointIndex = -1;
@@ -37,8 +36,6 @@ void matchKeyPoints(const vector<KeyPoint>& keypoints0, const Mat& H,
         const float r0 = 0.5f * keypoints0[i0].size;
         for(size_t i1 = 0; i1 < keypoints1.size(); i1++)
         {
-            if(nearestPointIndex >= 0 && usedMask[i1])
-                continue;
 
             float r1 = 0.5f * keypoints1[i1].size;
             float intersectRatio = calcIntersectRatio(points0t.at<Point2f>(i0), r0,
@@ -51,8 +48,6 @@ void matchKeyPoints(const vector<KeyPoint>& keypoints0, const Mat& H,
         }
 
         matches.push_back(DMatch(i0, nearestPointIndex, maxIntersectRatio));
-        if(nearestPointIndex >= 0)
-            usedMask[nearestPointIndex] = 1;
     }
 }
 
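Dropping usedMask is what the commit message calls allowing trainIdx duplication: several keypoints from the reference image may now share the same nearest keypoint in the scaled image instead of competing for it. A hedged sketch of what that means for the resulting match list; the helper name is illustrative and not part of the tests:

    #include <cstddef>
    #include <set>
    #include <vector>
    #include <opencv2/core.hpp>

    // With the usedMask logic removed, matches.size() can exceed the number of
    // distinct train keypoints, because a train keypoint may be reused.
    static std::size_t countDistinctTrainIdx(const std::vector<cv::DMatch>& matches)
    {
        std::set<int> train;
        for (const cv::DMatch& m : matches)
            if (m.trainIdx >= 0)            // -1 marks "no match found"
                train.insert(m.trainIdx);
        return train.size();
    }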
@@ -239,9 +234,8 @@ INSTANTIATE_TEST_CASE_P(AKAZE_DESCRIPTOR_KAZE, DetectorRotationInvariance,
  * Detector's scale invariance check
  */
 
-// TODO: Expected: (keyPointMatchesRatio) >= (minKeyPointMatchesRatio), actual: 0.596752 vs 0.69
-INSTANTIATE_TEST_CASE_P(DISABLED_SIFT, DetectorScaleInvariance,
-                        Value(IMAGE_BIKES, SIFT::create(), 0.69f, 0.98f));
+INSTANTIATE_TEST_CASE_P(SIFT, DetectorScaleInvariance,
+                        Value(IMAGE_BIKES, SIFT::create(0, 3, 0.09), 0.69f, 0.98f));
 
 INSTANTIATE_TEST_CASE_P(BRISK, DetectorScaleInvariance,
                         Value(IMAGE_BIKES, BRISK::create(), 0.08f, 0.49f));