Mirror of https://github.com/opencv/opencv.git (synced 2025-06-07 17:44:04 +08:00)
Merge pull request #20367 from augustinmanecy:features2d-rw
**Merge with contrib**: https://github.com/opencv/opencv_contrib/pull/3003

### Pull Request Readiness Checklist

See details at https://github.com/opencv/opencv/wiki/How_to_contribute#making-a-good-pull-request

- [x] I agree to contribute to the project under Apache 2 License.
- [x] To the best of my knowledge, the proposed patch is not based on code under GPL or another license that is incompatible with OpenCV.
- [x] The PR is proposed to the proper branch.
- [ ] There is a reference to the original bug report and related work.
- [x] There are accuracy tests, performance tests and test data in the opencv_extra repository, if applicable. The patch to opencv_extra has the same branch name.
- [ ] The feature is well documented and sample code can be built with the project CMake.
This commit is contained in:
parent 63b6b24cd0
commit 0bd54a60e9
@@ -319,6 +319,21 @@ public:
                          double sigma, int descriptorType);
 
     CV_WRAP virtual String getDefaultName() const CV_OVERRIDE;
+
+    CV_WRAP virtual void setNFeatures(int maxFeatures) = 0;
+    CV_WRAP virtual int getNFeatures() const = 0;
+
+    CV_WRAP virtual void setNOctaveLayers(int nOctaveLayers) = 0;
+    CV_WRAP virtual int getNOctaveLayers() const = 0;
+
+    CV_WRAP virtual void setContrastThreshold(double contrastThreshold) = 0;
+    CV_WRAP virtual double getContrastThreshold() const = 0;
+
+    CV_WRAP virtual void setEdgeThreshold(double edgeThreshold) = 0;
+    CV_WRAP virtual double getEdgeThreshold() const = 0;
+
+    CV_WRAP virtual void setSigma(double sigma) = 0;
+    CV_WRAP virtual double getSigma() const = 0;
 };
 
 typedef SIFT SiftFeatureDetector;
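Because the new accessors are marked `CV_WRAP`, they become callable through the generated Python/Java wrappers. A minimal Java sketch, not part of the patch, assuming the bindings are rebuilt from this branch:

```java
import org.opencv.core.Core;
import org.opencv.features2d.SIFT;

// Tune a SIFT instance after construction instead of passing every value to create().
public class SiftTuningSketch {
    static { System.loadLibrary(Core.NATIVE_LIBRARY_NAME); }

    public static void main(String[] args) {
        SIFT sift = SIFT.create();
        sift.setNFeatures(200);          // keep only the 200 strongest keypoints
        sift.setContrastThreshold(0.03); // accept lower-contrast features
        sift.setSigma(1.2);              // narrower Gaussian for the base octave
        System.out.println(sift.getNFeatures() + " features, sigma " + sift.getSigma());
    }
}
```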
@@ -374,14 +389,20 @@ public:
     /** @brief Set detection threshold.
     @param threshold AGAST detection threshold score.
     */
-    CV_WRAP virtual void setThreshold(int threshold) { CV_UNUSED(threshold); return; }
-    CV_WRAP virtual int getThreshold() const { return -1; }
+    CV_WRAP virtual void setThreshold(int threshold) = 0;
+    CV_WRAP virtual int getThreshold() const = 0;
 
     /** @brief Set detection octaves.
     @param octaves detection octaves. Use 0 to do single scale.
     */
-    CV_WRAP virtual void setOctaves(int octaves) { CV_UNUSED(octaves); return; }
-    CV_WRAP virtual int getOctaves() const { return -1; }
+    CV_WRAP virtual void setOctaves(int octaves) = 0;
+    CV_WRAP virtual int getOctaves() const = 0;
+    /** @brief Set detection patternScale.
+    @param patternScale apply this scale to the pattern used for sampling the neighbourhood of a
+    keypoint.
+    */
+    CV_WRAP virtual void setPatternScale(float patternScale) = 0;
+    CV_WRAP virtual float getPatternScale() const = 0;
 };
 
 /** @brief Class implementing the ORB (*oriented BRIEF*) keypoint detector and descriptor extractor
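The same pattern applies to BRISK: the setters lose their silent no-op defaults and patternScale becomes readable and writable. A hedged usage sketch, again assuming wrappers regenerated from this branch:

```java
import org.opencv.core.Core;
import org.opencv.features2d.BRISK;

// Adjust BRISK detection parameters after creation; previously setThreshold/setOctaves did nothing.
public class BriskTuningSketch {
    static { System.loadLibrary(Core.NATIVE_LIBRARY_NAME); }

    public static void main(String[] args) {
        BRISK brisk = BRISK.create();
        brisk.setThreshold(40);      // AGAST detection threshold
        brisk.setOctaves(4);         // detection octaves (0 = single scale)
        brisk.setPatternScale(1.2f); // scale applied to the sampling pattern
        System.out.println(brisk.getThreshold() + " / " + brisk.getPatternScale());
    }
}
```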
@@ -514,8 +535,27 @@ public:
     CV_WRAP virtual void setMaxArea(int maxArea) = 0;
     CV_WRAP virtual int getMaxArea() const = 0;
 
+    CV_WRAP virtual void setMaxVariation(double maxVariation) = 0;
+    CV_WRAP virtual double getMaxVariation() const = 0;
+
+    CV_WRAP virtual void setMinDiversity(double minDiversity) = 0;
+    CV_WRAP virtual double getMinDiversity() const = 0;
+
+    CV_WRAP virtual void setMaxEvolution(int maxEvolution) = 0;
+    CV_WRAP virtual int getMaxEvolution() const = 0;
+
+    CV_WRAP virtual void setAreaThreshold(double areaThreshold) = 0;
+    CV_WRAP virtual double getAreaThreshold() const = 0;
+
+    CV_WRAP virtual void setMinMargin(double min_margin) = 0;
+    CV_WRAP virtual double getMinMargin() const = 0;
+
+    CV_WRAP virtual void setEdgeBlurSize(int edge_blur_size) = 0;
+    CV_WRAP virtual int getEdgeBlurSize() const = 0;
+
     CV_WRAP virtual void setPass2Only(bool f) = 0;
     CV_WRAP virtual bool getPass2Only() const = 0;
 
     CV_WRAP virtual String getDefaultName() const CV_OVERRIDE;
 };
 
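MSER gains read/write access to the remaining constructor parameters. An illustrative Java sketch, assuming the regenerated wrappers from this branch:

```java
import org.opencv.core.Core;
import org.opencv.features2d.MSER;

// Inspect and adjust MSER stability parameters that were previously settable only via create().
public class MserTuningSketch {
    static { System.loadLibrary(Core.NATIVE_LIBRARY_NAME); }

    public static void main(String[] args) {
        MSER mser = MSER.create();
        mser.setMaxVariation(0.3);  // tolerate less stable regions
        mser.setMinDiversity(0.25); // prune regions too similar to their parent
        mser.setEdgeBlurSize(3);    // blur aperture used by the color variant
        System.out.println(mser.getMaxVariation() + " / " + mser.getMinDiversity());
    }
}
```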
@@ -660,6 +700,9 @@ public:
     CV_WRAP virtual void setBlockSize(int blockSize) = 0;
     CV_WRAP virtual int getBlockSize() const = 0;
 
+    CV_WRAP virtual void setGradientSize(int gradientSize_) = 0;
+    CV_WRAP virtual int getGradientSize() = 0;
+
     CV_WRAP virtual void setHarrisDetector(bool val) = 0;
     CV_WRAP virtual bool getHarrisDetector() const = 0;
 
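GFTTDetector exposes its gradient aperture in the same way. A short sketch under the same assumption about regenerated bindings:

```java
import org.opencv.core.Core;
import org.opencv.features2d.GFTTDetector;

// The Sobel aperture used for the corner-response gradients can now be read back and changed.
public class GfttGradientSizeSketch {
    static { System.loadLibrary(Core.NATIVE_LIBRARY_NAME); }

    public static void main(String[] args) {
        GFTTDetector gftt = GFTTDetector.create();
        gftt.setGradientSize(5);                    // default is 3
        gftt.setHarrisDetector(true);               // switch to the Harris corner score
        System.out.println(gftt.getGradientSize()); // 5
    }
}
```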
@@ -734,6 +777,10 @@ public:
 
     CV_WRAP static Ptr<SimpleBlobDetector>
       create(const SimpleBlobDetector::Params &parameters = SimpleBlobDetector::Params());
 
+    CV_WRAP virtual void setParams(const SimpleBlobDetector::Params& params ) = 0;
+    CV_WRAP virtual SimpleBlobDetector::Params getParams() const = 0;
+
     CV_WRAP virtual String getDefaultName() const CV_OVERRIDE;
     CV_WRAP virtual const std::vector<std::vector<cv::Point> >& getBlobContours() const;
 };
 
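SimpleBlobDetector previously offered no way to change its `Params` after creation; `setParams`/`getParams` close that gap. A sketch of the round trip; the `get_`/`set_` accessors on `SimpleBlobDetector_Params` are the generated field wrappers the new Java test below relies on, assumed here for illustration:

```java
import org.opencv.core.Core;
import org.opencv.features2d.SimpleBlobDetector;
import org.opencv.features2d.SimpleBlobDetector_Params;

// Swap the active parameter set of an existing detector and read it back.
public class BlobParamsSketch {
    static { System.loadLibrary(Core.NATIVE_LIBRARY_NAME); }

    public static void main(String[] args) {
        SimpleBlobDetector detector = SimpleBlobDetector.create();

        SimpleBlobDetector_Params params = detector.getParams();
        params.set_filterByArea(true);
        params.set_minArea(800f);
        params.set_maxArea(6000f);
        detector.setParams(params);

        System.out.println(detector.getParams().get_minArea()); // 800.0
    }
}
```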
@@ -0,0 +1,85 @@ (new file: AGASTFeatureDetectorTest.java)
package org.opencv.test.features2d;

import org.opencv.test.OpenCVTestCase;
import org.opencv.test.OpenCVTestRunner;
import org.opencv.features2d.AgastFeatureDetector;

public class AGASTFeatureDetectorTest extends OpenCVTestCase {

    AgastFeatureDetector detector;

    @Override
    protected void setUp() throws Exception {
        super.setUp();
        detector = AgastFeatureDetector.create(); // default (10,true,3)
    }

    public void testCreate() {
        assertNotNull(detector);
    }

    public void testDetectListOfMatListOfListOfKeyPoint() {
        fail("Not yet implemented");
    }

    public void testDetectListOfMatListOfListOfKeyPointListOfMat() {
        fail("Not yet implemented");
    }

    public void testDetectMatListOfKeyPoint() {
        fail("Not yet implemented");
    }

    public void testDetectMatListOfKeyPointMat() {
        fail("Not yet implemented");
    }

    public void testEmpty() {
        fail("Not yet implemented");
    }

    public void testRead() {
        String filename = OpenCVTestRunner.getTempFileName("xml");
        writeFile(filename, "<?xml version=\"1.0\"?>\n<opencv_storage>\n<name>Feature2D.AgastFeatureDetector</name>\n<threshold>11</threshold>\n<nonmaxSuppression>0</nonmaxSuppression>\n<type>2</type>\n</opencv_storage>\n");

        detector.read(filename);

        assertEquals(11, detector.getThreshold());
        assertEquals(false, detector.getNonmaxSuppression());
        assertEquals(2, detector.getType());
    }

    public void testReadYml() {
        String filename = OpenCVTestRunner.getTempFileName("yml");
        writeFile(filename, "%YAML:1.0\n---\nname: \"Feature2D.AgastFeatureDetector\"\nthreshold: 11\nnonmaxSuppression: 0\ntype: 2\n");

        detector.read(filename);

        assertEquals(11, detector.getThreshold());
        assertEquals(false, detector.getNonmaxSuppression());
        assertEquals(2, detector.getType());
    }

    public void testWrite() {
        String filename = OpenCVTestRunner.getTempFileName("xml");

        detector.write(filename);

        String truth = "<?xml version=\"1.0\"?>\n<opencv_storage>\n<name>Feature2D.AgastFeatureDetector</name>\n<threshold>10</threshold>\n<nonmaxSuppression>1</nonmaxSuppression>\n<type>3</type>\n</opencv_storage>\n";
        String actual = readFile(filename);
        actual = actual.replaceAll("e([+-])0(\\d\\d)", "e$1$2"); // NOTE: workaround for different platforms double representation
        assertEquals(truth, actual);
    }

    public void testWriteYml() {
        String filename = OpenCVTestRunner.getTempFileName("yml");

        detector.write(filename);

        String truth = "%YAML:1.0\n---\nname: \"Feature2D.AgastFeatureDetector\"\nthreshold: 10\nnonmaxSuppression: 1\ntype: 3\n";
        String actual = readFile(filename);
        actual = actual.replaceAll("e([+-])0(\\d\\d)", "e$1$2"); // NOTE: workaround for different platforms double representation
        assertEquals(truth, actual);
    }

}
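The new Java tests all follow the same round trip: `write()` serializes the algorithm state (now including the `name` tag), `read()` restores it, and the getters verify the values. Outside the test harness the same calls can persist a tuned detector; a small sketch with an illustrative file name:

```java
import org.opencv.core.Core;
import org.opencv.features2d.AgastFeatureDetector;

// Persist a tuned detector to YAML and restore it into a fresh instance.
public class AgastPersistenceSketch {
    static { System.loadLibrary(Core.NATIVE_LIBRARY_NAME); }

    public static void main(String[] args) {
        AgastFeatureDetector agast = AgastFeatureDetector.create();
        agast.setThreshold(25);
        agast.write("agast-settings.yml"); // writes name, threshold, nonmaxSuppression, type

        AgastFeatureDetector restored = AgastFeatureDetector.create();
        restored.read("agast-settings.yml");
        System.out.println(restored.getThreshold()); // 25
    }
}
```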
@@ -0,0 +1,67 @@ (new file: AKAZEDescriptorExtractorTest.java)
package org.opencv.test.features2d;

import org.opencv.test.OpenCVTestCase;
import org.opencv.test.OpenCVTestRunner;
import org.opencv.features2d.AKAZE;

public class AKAZEDescriptorExtractorTest extends OpenCVTestCase {

    AKAZE extractor;

    @Override
    protected void setUp() throws Exception {
        super.setUp();
        extractor = AKAZE.create(); // default (5,0,3,0.001f,4,4,1)
    }

    public void testCreate() {
        assertNotNull(extractor);
    }

    public void testDetectListOfMatListOfListOfKeyPoint() {
        fail("Not yet implemented");
    }

    public void testDetectListOfMatListOfListOfKeyPointListOfMat() {
        fail("Not yet implemented");
    }

    public void testDetectMatListOfKeyPoint() {
        fail("Not yet implemented");
    }

    public void testDetectMatListOfKeyPointMat() {
        fail("Not yet implemented");
    }

    public void testEmpty() {
        fail("Not yet implemented");
    }

    public void testReadYml() {
        String filename = OpenCVTestRunner.getTempFileName("yml");
        writeFile(filename, "%YAML:1.0\n---\nformat: 3\nname: \"Feature2D.AKAZE\"\ndescriptor: 4\ndescriptor_channels: 2\ndescriptor_size: 32\nthreshold: 0.125\noctaves: 3\nsublevels: 5\ndiffusivity: 2\n");

        extractor.read(filename);

        assertEquals(4, extractor.getDescriptorType());
        assertEquals(2, extractor.getDescriptorChannels());
        assertEquals(32, extractor.getDescriptorSize());
        assertEquals(0.125, extractor.getThreshold());
        assertEquals(3, extractor.getNOctaves());
        assertEquals(5, extractor.getNOctaveLayers());
        assertEquals(2, extractor.getDiffusivity());
    }

    public void testWriteYml() {
        String filename = OpenCVTestRunner.getTempFileName("yml");

        extractor.write(filename);

        String truth = "%YAML:1.0\n---\nformat: 3\nname: \"Feature2D.AKAZE\"\ndescriptor: 5\ndescriptor_channels: 3\ndescriptor_size: 0\nthreshold: 1.0000000474974513e-03\noctaves: 4\nsublevels: 4\ndiffusivity: 1\n";
        String actual = readFile(filename);
        actual = actual.replaceAll("e([+-])0(\\d\\d)", "e$1$2"); // NOTE: workaround for different platforms double representation
        assertEquals(truth, actual);
    }

}
@@ -86,7 +86,7 @@ public class BRIEFDescriptorExtractorTest extends OpenCVTestCase {
 
         extractor.write(filename);
 
-        String truth = "<?xml version=\"1.0\"?>\n<opencv_storage>\n<descriptorSize>32</descriptorSize>\n</opencv_storage>\n";
+        String truth = "<?xml version=\"1.0\"?>\n<opencv_storage>\n<name>Feature2D.BRIEF</name>\n<descriptorSize>32</descriptorSize>\n<use_orientation>0</use_orientation>\n</opencv_storage>\n";
         assertEquals(truth, readFile(filename));
     }
 
@@ -95,7 +95,7 @@ public class BRIEFDescriptorExtractorTest extends OpenCVTestCase {
 
         extractor.write(filename);
 
-        String truth = "%YAML:1.0\n---\ndescriptorSize: 32\n";
+        String truth = "%YAML:1.0\n---\nname: \"Feature2D.BRIEF\"\ndescriptorSize: 32\nuse_orientation: 0\n";
         assertEquals(truth, readFile(filename));
     }
 
@@ -0,0 +1,63 @@ (new file: BRISKDescriptorExtractorTest.java)
package org.opencv.test.features2d;

import org.opencv.test.OpenCVTestCase;
import org.opencv.test.OpenCVTestRunner;
import org.opencv.features2d.BRISK;

public class BRISKDescriptorExtractorTest extends OpenCVTestCase {

    BRISK extractor;

    @Override
    protected void setUp() throws Exception {
        super.setUp();
        extractor = BRISK.create(); // default (30,3,1)
    }

    public void testCreate() {
        assertNotNull(extractor);
    }

    public void testDetectListOfMatListOfListOfKeyPoint() {
        fail("Not yet implemented");
    }

    public void testDetectListOfMatListOfListOfKeyPointListOfMat() {
        fail("Not yet implemented");
    }

    public void testDetectMatListOfKeyPoint() {
        fail("Not yet implemented");
    }

    public void testDetectMatListOfKeyPointMat() {
        fail("Not yet implemented");
    }

    public void testEmpty() {
        fail("Not yet implemented");
    }

    public void testReadYml() {
        String filename = OpenCVTestRunner.getTempFileName("yml");
        writeFile(filename, "%YAML:1.0\n---\nname: \"Feature2D.BRISK\"\nthreshold: 31\noctaves: 4\npatternScale: 1.1\n");

        extractor.read(filename);

        assertEquals(31, extractor.getThreshold());
        assertEquals(4, extractor.getOctaves());
        assertEquals(1.1f, extractor.getPatternScale());
    }

    public void testWriteYml() {
        String filename = OpenCVTestRunner.getTempFileName("yml");

        extractor.write(filename);

        String truth = "%YAML:1.0\n---\nname: \"Feature2D.BRISK\"\nthreshold: 30\noctaves: 3\npatternScale: 1.\n";
        String actual = readFile(filename);
        actual = actual.replaceAll("e([+-])0(\\d\\d)", "e$1$2"); // NOTE: workaround for different platforms double representation
        assertEquals(truth, actual);
    }

}
@@ -8,7 +8,6 @@ import org.opencv.core.Mat;
 import org.opencv.core.MatOfKeyPoint;
 import org.opencv.core.Point;
 import org.opencv.core.Scalar;
-import org.opencv.features2d.Feature2D;
 import org.opencv.features2d.FastFeatureDetector;
 import org.opencv.core.KeyPoint;
 import org.opencv.test.OpenCVTestCase;
@@ -17,7 +16,7 @@ import org.opencv.imgproc.Imgproc;
 
 public class FASTFeatureDetectorTest extends OpenCVTestCase {
 
-    Feature2D detector;
+    FastFeatureDetector detector;
     KeyPoint[] truth;
 
     private Mat getMaskImg() {
@@ -78,20 +77,24 @@ public class FASTFeatureDetectorTest extends OpenCVTestCase {
 
     public void testEmpty() {
         // assertFalse(detector.empty());
-        fail("Not yet implemented"); //FAST does not override empty() method
+        fail("Not yet implemented"); // FAST does not override empty() method
     }
 
     public void testRead() {
-        String filename = OpenCVTestRunner.getTempFileName("yml");
+        String filename = OpenCVTestRunner.getTempFileName("xml");
 
-        writeFile(filename, "%YAML:1.0\n---\nthreshold: 130\nnonmaxSuppression: 1\n");
+        writeFile(filename, "<?xml version=\"1.0\"?>\n<opencv_storage>\n<name>Feature2D.FastFeatureDetector</name>\n<threshold>10</threshold>\n<nonmaxSuppression>1</nonmaxSuppression>\n<type>2</type>\n</opencv_storage>\n");
         detector.read(filename);
 
+        assertEquals(10, detector.getThreshold());
+        assertEquals(true, detector.getNonmaxSuppression());
+        assertEquals(2, detector.getType());
+
         MatOfKeyPoint keypoints1 = new MatOfKeyPoint();
 
         detector.detect(grayChess, keypoints1);
 
-        writeFile(filename, "%YAML:1.0\n---\nthreshold: 150\nnonmaxSuppression: 1\n");
+        writeFile(filename, "<?xml version=\"1.0\"?>\n<opencv_storage>\n<name>Feature2D.FastFeatureDetector</name>\n<threshold>150</threshold>\n<nonmaxSuppression>1</nonmaxSuppression>\n<type>2</type>\n</opencv_storage>\n");
         detector.read(filename);
 
         MatOfKeyPoint keypoints2 = new MatOfKeyPoint();
@@ -104,16 +107,18 @@ public class FASTFeatureDetectorTest extends OpenCVTestCase {
     public void testReadYml() {
         String filename = OpenCVTestRunner.getTempFileName("yml");
 
-        writeFile(filename,
-                "<?xml version=\"1.0\"?>\n<opencv_storage>\n<threshold>130</threshold>\n<nonmaxSuppression>1</nonmaxSuppression>\n</opencv_storage>\n");
+        writeFile(filename, "%YAML:1.0\n---\nthreshold: 130\nnonmaxSuppression: 1\ntype: 2\n");
         detector.read(filename);
 
+        assertEquals(130, detector.getThreshold());
+        assertEquals(true, detector.getNonmaxSuppression());
+        assertEquals(2, detector.getType());
+
         MatOfKeyPoint keypoints1 = new MatOfKeyPoint();
 
         detector.detect(grayChess, keypoints1);
 
-        writeFile(filename,
-                "<?xml version=\"1.0\"?>\n<opencv_storage>\n<threshold>150</threshold>\n<nonmaxSuppression>1</nonmaxSuppression>\n</opencv_storage>\n");
+        writeFile(filename, "%YAML:1.0\n---\nthreshold: 150\nnonmaxSuppression: 1\ntype: 2\n");
        detector.read(filename);
 
         MatOfKeyPoint keypoints2 = new MatOfKeyPoint();
@@ -123,28 +128,14 @@ public class FASTFeatureDetectorTest extends OpenCVTestCase {
         assertTrue(keypoints2.total() <= keypoints1.total());
     }
 
-    public void testWrite() {
-        String filename = OpenCVTestRunner.getTempFileName("xml");
-
-        detector.write(filename);
-
-        // String truth = "<?xml version=\"1.0\"?>\n<opencv_storage>\n<name>Feature2D.FAST</name>\n<nonmaxSuppression>1</nonmaxSuppression>\n<threshold>10</threshold>\n<type>2</type>\n</opencv_storage>\n";
-        String truth = "<?xml version=\"1.0\"?>\n<opencv_storage>\n</opencv_storage>\n";
-        String data = readFile(filename);
-        //Log.d("qqq", "\"" + data + "\"");
-        assertEquals(truth, data);
-    }
-
     public void testWriteYml() {
         String filename = OpenCVTestRunner.getTempFileName("yml");
 
         detector.write(filename);
 
-        // String truth = "%YAML:1.0\n---\nname: \"Feature2D.FAST\"\nnonmaxSuppression: 1\nthreshold: 10\ntype: 2\n";
-        String truth = "%YAML:1.0\n---\n";
+        String truth = "%YAML:1.0\n---\nname: \"Feature2D.FastFeatureDetector\"\nthreshold: 10\nnonmaxSuppression: 1\ntype: 2\n";
         String data = readFile(filename);
-        //Log.d("qqq", "\"" + data + "\"");
         assertEquals(truth, data);
     }
 }
@@ -79,8 +79,8 @@ public class Features2dTest extends OpenCVTestCase {
 
     public void testPTOD()
     {
-        String detectorCfg = "%YAML:1.0\n---\nhessianThreshold: 4000.\noctaves: 3\noctaveLayers: 4\nupright: 0\n";
-        String extractorCfg = "%YAML:1.0\n---\nnOctaves: 4\nnOctaveLayers: 2\nextended: 0\nupright: 0\n";
+        String detectorCfg = "%YAML:1.0\n---\nhessianThreshold: 4000.\nextended: 0\nupright: 0\nOctaves: 4\nOctaveLayers: 3\n";
+        String extractorCfg = "%YAML:1.0\n---\nhessianThreshold: 4000.\nextended: 0\nupright: 0\nOctaves: 4\nOctaveLayers: 3\n";
 
         Feature2D detector = createClassInstance(XFEATURES2D+"SURF", DEFAULT_FACTORY, null, null);
         Feature2D extractor = createClassInstance(XFEATURES2D+"SURF", DEFAULT_FACTORY, null, null);
@@ -1,11 +1,21 @@ (GFTTFeatureDetectorTest.java)
 package org.opencv.test.features2d;
 
 import org.opencv.test.OpenCVTestCase;
+import org.opencv.test.OpenCVTestRunner;
+import org.opencv.features2d.GFTTDetector;
 
 public class GFTTFeatureDetectorTest extends OpenCVTestCase {
 
+    GFTTDetector detector;
+
+    @Override
+    protected void setUp() throws Exception {
+        super.setUp();
+        detector = GFTTDetector.create(); // default constructor have (1000, 0.01, 1, 3, 3, false, 0.04)
+    }
+
     public void testCreate() {
-        fail("Not yet implemented");
+        assertNotNull(detector);
     }
 
     public void testDetectListOfMatListOfListOfKeyPoint() {
@@ -28,12 +38,30 @@ public class GFTTFeatureDetectorTest extends OpenCVTestCase {
         fail("Not yet implemented");
     }
 
-    public void testRead() {
-        fail("Not yet implemented");
+    public void testReadYml() {
+        String filename = OpenCVTestRunner.getTempFileName("yml");
+
+        writeFile(filename, "%YAML:1.0\n---\nname: \"Feature2D.GFTTDetector\"\nnfeatures: 500\nqualityLevel: 2.0000000000000000e-02\nminDistance: 2.\nblockSize: 4\ngradSize: 5\nuseHarrisDetector: 1\nk: 5.0000000000000000e-02\n");
+        detector.read(filename);
+
+        assertEquals(500, detector.getMaxFeatures());
+        assertEquals(0.02, detector.getQualityLevel());
+        assertEquals(2.0, detector.getMinDistance());
+        assertEquals(4, detector.getBlockSize());
+        assertEquals(5, detector.getGradientSize());
+        assertEquals(true, detector.getHarrisDetector());
+        assertEquals(0.05, detector.getK());
     }
 
-    public void testWrite() {
-        fail("Not yet implemented");
+    public void testWriteYml() {
+        String filename = OpenCVTestRunner.getTempFileName("yml");
+
+        detector.write(filename);
+
+        String truth = "%YAML:1.0\n---\nname: \"Feature2D.GFTTDetector\"\nnfeatures: 1000\nqualityLevel: 1.0000000000000000e-02\nminDistance: 1.\nblockSize: 3\ngradSize: 3\nuseHarrisDetector: 0\nk: 4.0000000000000001e-02\n";
+        String actual = readFile(filename);
+        actual = actual.replaceAll("e([+-])0(\\d\\d)", "e$1$2"); // NOTE: workaround for different platforms double representation
+        assertEquals(truth, actual);
     }
 
 }
@@ -0,0 +1,66 @@ (new file: KAZEDescriptorExtractorTest.java)
package org.opencv.test.features2d;

import org.opencv.test.OpenCVTestCase;
import org.opencv.test.OpenCVTestRunner;
import org.opencv.features2d.KAZE;

public class KAZEDescriptorExtractorTest extends OpenCVTestCase {

    KAZE extractor;

    @Override
    protected void setUp() throws Exception {
        super.setUp();
        extractor = KAZE.create(); // default (false,false,0.001f,4,4,1)
    }

    public void testCreate() {
        assertNotNull(extractor);
    }

    public void testDetectListOfMatListOfListOfKeyPoint() {
        fail("Not yet implemented");
    }

    public void testDetectListOfMatListOfListOfKeyPointListOfMat() {
        fail("Not yet implemented");
    }

    public void testDetectMatListOfKeyPoint() {
        fail("Not yet implemented");
    }

    public void testDetectMatListOfKeyPointMat() {
        fail("Not yet implemented");
    }

    public void testEmpty() {
        fail("Not yet implemented");
    }

    public void testReadYml() {
        String filename = OpenCVTestRunner.getTempFileName("yml");
        writeFile(filename, "%YAML:1.0\n---\nformat: 3\nname: \"Feature2D.KAZE\"\nextended: 1\nupright: 1\nthreshold: 0.125\noctaves: 3\nsublevels: 5\ndiffusivity: 2\n");

        extractor.read(filename);

        assertEquals(true, extractor.getExtended());
        assertEquals(true, extractor.getUpright());
        assertEquals(0.125, extractor.getThreshold());
        assertEquals(3, extractor.getNOctaves());
        assertEquals(5, extractor.getNOctaveLayers());
        assertEquals(2, extractor.getDiffusivity());
    }

    public void testWriteYml() {
        String filename = OpenCVTestRunner.getTempFileName("yml");

        extractor.write(filename);

        String truth = "%YAML:1.0\n---\nformat: 3\nname: \"Feature2D.KAZE\"\nextended: 0\nupright: 0\nthreshold: 1.0000000474974513e-03\noctaves: 4\nsublevels: 4\ndiffusivity: 1\n";
        String actual = readFile(filename);
        actual = actual.replaceAll("e([+-])0(\\d\\d)", "e$1$2"); // NOTE: workaround for different platforms double representation
        assertEquals(truth, actual);
    }

}
@@ -1,11 +1,21 @@ (MSERFeatureDetectorTest.java)
 package org.opencv.test.features2d;
 
 import org.opencv.test.OpenCVTestCase;
+import org.opencv.test.OpenCVTestRunner;
+import org.opencv.features2d.MSER;
 
 public class MSERFeatureDetectorTest extends OpenCVTestCase {
 
+    MSER detector;
+
+    @Override
+    protected void setUp() throws Exception {
+        super.setUp();
+        detector = MSER.create(); // default constructor have (5, 60, 14400, .25, .2, 200, 1.01, .003, 5)
+    }
+
     public void testCreate() {
-        fail("Not yet implemented");
+        assertNotNull(detector);
     }
 
     public void testDetectListOfMatListOfListOfKeyPoint() {
@@ -28,12 +38,33 @@ public class MSERFeatureDetectorTest extends OpenCVTestCase {
         fail("Not yet implemented");
     }
 
-    public void testRead() {
-        fail("Not yet implemented");
+    public void testReadYml() {
+        String filename = OpenCVTestRunner.getTempFileName("yml");
+
+        writeFile(filename, "%YAML:1.0\n---\nname: \"Feature2D.MSER\"\ndelta: 6\nminArea: 62\nmaxArea: 14402\nmaxVariation: .26\nminDiversity: .3\nmaxEvolution: 201\nareaThreshold: 1.02\nminMargin: 3.0e-3\nedgeBlurSize: 3\npass2Only: 1\n");
+        detector.read(filename);
+
+        assertEquals(6, detector.getDelta());
+        assertEquals(62, detector.getMinArea());
+        assertEquals(14402, detector.getMaxArea());
+        assertEquals(.26, detector.getMaxVariation());
+        assertEquals(.3, detector.getMinDiversity());
+        assertEquals(201, detector.getMaxEvolution());
+        assertEquals(1.02, detector.getAreaThreshold());
+        assertEquals(0.003, detector.getMinMargin());
+        assertEquals(3, detector.getEdgeBlurSize());
+        assertEquals(true, detector.getPass2Only());
     }
 
-    public void testWrite() {
-        fail("Not yet implemented");
+    public void testWriteYml() {
+        String filename = OpenCVTestRunner.getTempFileName("yml");
+
+        detector.write(filename);
+
+        String truth = "%YAML:1.0\n---\nname: \"Feature2D.MSER\"\ndelta: 5\nminArea: 60\nmaxArea: 14400\nmaxVariation: 2.5000000000000000e-01\nminDiversity: 2.0000000000000001e-01\nmaxEvolution: 200\nareaThreshold: 1.0100000000000000e+00\nminMargin: 3.0000000000000001e-03\nedgeBlurSize: 5\npass2Only: 0\n";
+        String actual = readFile(filename);
+        actual = actual.replaceAll("e([+-])0(\\d\\d)", "e$1$2"); // NOTE: workaround for different platforms double representation
+        assertEquals(truth, actual);
    }
 
 }
@@ -75,16 +75,25 @@ public class ORBDescriptorExtractorTest extends OpenCVTestCase {
         fail("Not yet implemented"); // ORB does not override empty() method
     }
 
-    public void testRead() {
+    public void testReadYml() {
         KeyPoint point = new KeyPoint(55.775577545166016f, 44.224422454833984f, 16, 9.754629f, 8617.863f, 1, -1);
         MatOfKeyPoint keypoints = new MatOfKeyPoint(point);
         Mat img = getTestImg();
         Mat descriptors = new Mat();
 
-        // String filename = OpenCVTestRunner.getTempFileName("yml");
-        // writeFile(filename, "%YAML:1.0\n---\nscaleFactor: 1.1\nnLevels: 3\nfirstLevel: 0\nedgeThreshold: 31\npatchSize: 31\n");
-        // extractor.read(filename);
-        extractor = ORB.create(500, 1.1f, 3, 31, 0, 2, ORB.HARRIS_SCORE, 31, 20);
+        String filename = OpenCVTestRunner.getTempFileName("yml");
+        writeFile(filename, "%YAML:1.0\n---\nnfeatures: 500\nscaleFactor: 1.1\nnlevels: 3\nedgeThreshold: 31\nfirstLevel: 0\nwta_k: 2\nscoreType: 0\npatchSize: 31\nfastThreshold: 20\n");
+        extractor.read(filename);
+
+        assertEquals(500, extractor.getMaxFeatures());
+        assertEquals(1.1, extractor.getScaleFactor());
+        assertEquals(3, extractor.getNLevels());
+        assertEquals(31, extractor.getEdgeThreshold());
+        assertEquals(0, extractor.getFirstLevel());
+        assertEquals(2, extractor.getWTA_K());
+        assertEquals(0, extractor.getScoreType());
+        assertEquals(31, extractor.getPatchSize());
+        assertEquals(20, extractor.getFastThreshold());
 
         extractor.compute(img, keypoints, descriptors);
 
@@ -97,25 +106,13 @@ public class ORBDescriptorExtractorTest extends OpenCVTestCase {
         assertDescriptorsClose(truth, descriptors, 1);
     }
 
-    public void testWrite() {
-        String filename = OpenCVTestRunner.getTempFileName("xml");
-
-        extractor.write(filename);
-
-        // String truth = "<?xml version=\"1.0\"?>\n<opencv_storage>\n<name>Feature2D.ORB</name>\n<WTA_K>2</WTA_K>\n<edgeThreshold>31</edgeThreshold>\n<firstLevel>0</firstLevel>\n<nFeatures>500</nFeatures>\n<nLevels>8</nLevels>\n<patchSize>31</patchSize>\n<scaleFactor>1.2000000476837158e+00</scaleFactor>\n<scoreType>0</scoreType>\n</opencv_storage>\n";
-        String truth = "<?xml version=\"1.0\"?>\n<opencv_storage>\n</opencv_storage>\n";
-        String actual = readFile(filename);
-        actual = actual.replaceAll("e\\+000", "e+00"); // NOTE: workaround for different platforms double representation
-        assertEquals(truth, actual);
-    }
-
     public void testWriteYml() {
         String filename = OpenCVTestRunner.getTempFileName("yml");
 
         extractor.write(filename);
 
-        // String truth = "%YAML:1.0\n---\nname: \"Feature2D.ORB\"\nWTA_K: 2\nedgeThreshold: 31\nfirstLevel: 0\nnFeatures: 500\nnLevels: 8\npatchSize: 31\nscaleFactor: 1.2000000476837158e+00\nscoreType: 0\n";
-        String truth = "%YAML:1.0\n---\n";
+        String truth = "%YAML:1.0\n---\nname: \"Feature2D.ORB\"\nnfeatures: 500\nscaleFactor: 1.2000000476837158e+00\nnlevels: 8\nedgeThreshold: 31\nfirstLevel: 0\nwta_k: 2\nscoreType: 0\npatchSize: 31\nfastThreshold: 20\n";
+        // String truth = "%YAML:1.0\n---\n";
         String actual = readFile(filename);
         actual = actual.replaceAll("e\\+000", "e+00"); // NOTE: workaround for different platforms double representation
         assertEquals(truth, actual);
@@ -10,11 +10,11 @@ import org.opencv.features2d.SIFT;
 import org.opencv.test.OpenCVTestCase;
 import org.opencv.test.OpenCVTestRunner;
 import org.opencv.imgproc.Imgproc;
-import org.opencv.features2d.Feature2D;
+import org.opencv.features2d.SIFT;
 
 public class SIFTDescriptorExtractorTest extends OpenCVTestCase {
 
-    Feature2D extractor;
+    SIFT extractor;
     KeyPoint keypoint;
     int matSize;
     Mat truth;
@@ -43,7 +43,7 @@ public class SIFTDescriptorExtractorTest extends OpenCVTestCase {
                     117, 112, 117, 76, 117, 54, 117, 25, 29, 22, 117, 117, 16, 11, 14,
                     1, 0, 0, 22, 26, 0, 0, 0, 0, 1, 4, 15, 2, 47, 8, 0, 0, 82, 56, 31,
                     17, 81, 12, 0, 0, 26, 23, 18, 23, 0, 0, 0, 0, 0, 0, 0, 0
                 );
             }
         };
     }
@@ -76,23 +76,23 @@ public class SIFTDescriptorExtractorTest extends OpenCVTestCase {
 
     public void testEmpty() {
         // assertFalse(extractor.empty());
-        fail("Not yet implemented"); //SIFT does not override empty() method
+        fail("Not yet implemented"); // SIFT does not override empty() method
     }
 
-    public void testRead() {
-        fail("Not yet implemented");
-    }
-
-    public void testWrite() {
-        String filename = OpenCVTestRunner.getTempFileName("xml");
-
-        extractor.write(filename);
-
-        // String truth = "<?xml version=\"1.0\"?>\n<opencv_storage>\n<name>Feature2D.SIFT</name>\n<contrastThreshold>4.0000000000000001e-02</contrastThreshold>\n<edgeThreshold>10.</edgeThreshold>\n<nFeatures>0</nFeatures>\n<nOctaveLayers>3</nOctaveLayers>\n<sigma>1.6000000000000001e+00</sigma>\n</opencv_storage>\n";
-        String truth = "<?xml version=\"1.0\"?>\n<opencv_storage>\n</opencv_storage>\n";
-        String actual = readFile(filename);
-        actual = actual.replaceAll("e([+-])0(\\d\\d)", "e$1$2"); // NOTE: workaround for different platforms double representation
-        assertEquals(truth, actual);
+    public void testReadYml() {
+        String filename = OpenCVTestRunner.getTempFileName("yml");
+        writeFile(filename, "%YAML:1.0\n---\nname: \"Feature2D.SIFT\"\nnfeatures: 100\nnOctaveLayers: 4\ncontrastThreshold: 5.0000000000000001e-02\nedgeThreshold: 11\nsigma: 1.7\ndescriptorType: 5\n");
+
+        extractor.read(filename);
+
+        assertEquals(128, extractor.descriptorSize());
+
+        assertEquals(100, extractor.getNFeatures());
+        assertEquals(4, extractor.getNOctaveLayers());
+        assertEquals(0.05, extractor.getContrastThreshold());
+        assertEquals(11., extractor.getEdgeThreshold());
+        assertEquals(1.7, extractor.getSigma());
+        assertEquals(5, extractor.descriptorType());
     }
 
     public void testWriteYml() {
@@ -100,8 +100,7 @@ public class SIFTDescriptorExtractorTest extends OpenCVTestCase {
 
         extractor.write(filename);
 
-        // String truth = "%YAML:1.0\n---\nname: \"Feature2D.SIFT\"\ncontrastThreshold: 4.0000000000000001e-02\nedgeThreshold: 10.\nnFeatures: 0\nnOctaveLayers: 3\nsigma: 1.6000000000000001e+00\n";
-        String truth = "%YAML:1.0\n---\n";
+        String truth = "%YAML:1.0\n---\nname: \"Feature2D.SIFT\"\nnfeatures: 0\nnOctaveLayers: 3\ncontrastThreshold: 4.0000000000000001e-02\nedgeThreshold: 10.\nsigma: 1.6000000000000001e+00\ndescriptorType: 5\n";
         String actual = readFile(filename);
         actual = actual.replaceAll("e([+-])0(\\d\\d)", "e$1$2"); // NOTE: workaround for different platforms double representation
         assertEquals(truth, actual);
@@ -11,12 +11,12 @@ import org.opencv.core.KeyPoint;
 import org.opencv.test.OpenCVTestCase;
 import org.opencv.test.OpenCVTestRunner;
 import org.opencv.imgproc.Imgproc;
-import org.opencv.features2d.Feature2D;
 import org.opencv.features2d.SimpleBlobDetector;
+import org.opencv.features2d.SimpleBlobDetector_Params;
 
 public class SIMPLEBLOBFeatureDetectorTest extends OpenCVTestCase {
 
-    Feature2D detector;
+    SimpleBlobDetector detector;
     int matSize;
     KeyPoint[] truth;
 
@@ -47,8 +47,8 @@ public class SIMPLEBLOBFeatureDetectorTest extends OpenCVTestCase {
         detector = SimpleBlobDetector.create();
         matSize = 200;
         truth = new KeyPoint[] {
-            new KeyPoint( 140, 100, 41.036568f, -1, 0, 0, -1),
-            new KeyPoint( 60, 100, 48.538486f, -1, 0, 0, -1),
+            new KeyPoint(140, 100, 41.036568f, -1, 0, 0, -1),
+            new KeyPoint(60, 100, 48.538486f, -1, 0, 0, -1),
             new KeyPoint(100, 60, 36.769554f, -1, 0, 0, -1),
             new KeyPoint(100, 140, 28.635643f, -1, 0, 0, -1),
             new KeyPoint(100, 100, 20.880613f, -1, 0, 0, -1)
@@ -91,16 +91,38 @@ public class SIMPLEBLOBFeatureDetectorTest extends OpenCVTestCase {
         fail("Not yet implemented");
     }
 
-    public void testRead() {
+    public void testReadYml() {
         Mat img = getTestImg();
 
         MatOfKeyPoint keypoints1 = new MatOfKeyPoint();
         detector.detect(img, keypoints1);
 
         String filename = OpenCVTestRunner.getTempFileName("yml");
-        writeFile(filename, "%YAML:1.0\nthresholdStep: 10\nminThreshold: 50\nmaxThreshold: 220\nminRepeatability: 2\nfilterByArea: true\nminArea: 800\nmaxArea: 5000\n");
+        writeFile(filename, "%YAML:1.0\nthresholdStep: 10.0\nminThreshold: 50\nmaxThreshold: 220\nminRepeatability: 2\nminDistBetweenBlobs: 10.\nfilterByColor: 1\nblobColor: 0\nfilterByArea: 1\nminArea: 800\nmaxArea: 6000\nfilterByCircularity: 0\nminCircularity: 0.7\nmaxCircularity: 10.\nfilterByInertia: 1\nminInertiaRatio: 0.2\nmaxInertiaRatio: 11.\nfilterByConvexity: true\nminConvexity: 0.9\nmaxConvexity: 12.\n");
         detector.read(filename);
 
+        SimpleBlobDetector_Params params = detector.getParams();
+        assertEquals(10.0f, params.get_thresholdStep());
+        assertEquals(50f, params.get_minThreshold());
+        assertEquals(220f, params.get_maxThreshold());
+        assertEquals(2, params.get_minRepeatability());
+        assertEquals(10.0f, params.get_minDistBetweenBlobs());
+        assertEquals(true, params.get_filterByColor());
+        // FIXME: blobColor field has uchar type in C++ and cannot be automatically wrapped to Java as it does not support unsigned types
+        //assertEquals(0, params.get_blobColor());
+        assertEquals(true, params.get_filterByArea());
+        assertEquals(800f, params.get_minArea());
+        assertEquals(6000f, params.get_maxArea());
+        assertEquals(false, params.get_filterByCircularity());
+        assertEquals(0.7f, params.get_minCircularity());
+        assertEquals(10.0f, params.get_maxCircularity());
+        assertEquals(true, params.get_filterByInertia());
+        assertEquals(0.2f, params.get_minInertiaRatio());
+        assertEquals(11.0f, params.get_maxInertiaRatio());
+        assertEquals(true, params.get_filterByConvexity());
+        assertEquals(0.9f, params.get_minConvexity());
+        assertEquals(12.0f, params.get_maxConvexity());
+
         MatOfKeyPoint keypoints2 = new MatOfKeyPoint();
         detector.detect(img, keypoints2);
 
@@ -1,133 +0,0 @@ (deleted file: STARFeatureDetectorTest.java)
package org.opencv.test.features2d;

import java.util.Arrays;

import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.MatOfKeyPoint;
import org.opencv.core.Point;
import org.opencv.core.Scalar;
import org.opencv.core.KeyPoint;
import org.opencv.test.OpenCVTestCase;
import org.opencv.test.OpenCVTestRunner;
import org.opencv.imgproc.Imgproc;
import org.opencv.features2d.Feature2D;

public class STARFeatureDetectorTest extends OpenCVTestCase {

    Feature2D detector;
    int matSize;
    KeyPoint[] truth;

    private Mat getMaskImg() {
        Mat mask = new Mat(matSize, matSize, CvType.CV_8U, new Scalar(255));
        Mat right = mask.submat(0, matSize, matSize / 2, matSize);
        right.setTo(new Scalar(0));
        return mask;
    }

    private Mat getTestImg() {
        Scalar color = new Scalar(0);
        int center = matSize / 2;
        int radius = 6;
        int offset = 40;

        Mat img = new Mat(matSize, matSize, CvType.CV_8U, new Scalar(255));
        Imgproc.circle(img, new Point(center - offset, center), radius, color, -1);
        Imgproc.circle(img, new Point(center + offset, center), radius, color, -1);
        Imgproc.circle(img, new Point(center, center - offset), radius, color, -1);
        Imgproc.circle(img, new Point(center, center + offset), radius, color, -1);
        Imgproc.circle(img, new Point(center, center), radius, color, -1);
        return img;
    }

    protected void setUp() throws Exception {
        super.setUp();
        detector = createClassInstance(XFEATURES2D+"StarDetector", DEFAULT_FACTORY, null, null);
        matSize = 200;
        truth = new KeyPoint[] {
            new KeyPoint( 95,  80, 22, -1, 31.5957f, 0, -1),
            new KeyPoint(105,  80, 22, -1, 31.5957f, 0, -1),
            new KeyPoint( 80,  95, 22, -1, 31.5957f, 0, -1),
            new KeyPoint(120,  95, 22, -1, 31.5957f, 0, -1),
            new KeyPoint(100, 100,  8, -1, 30.f, 0, -1),
            new KeyPoint( 80, 105, 22, -1, 31.5957f, 0, -1),
            new KeyPoint(120, 105, 22, -1, 31.5957f, 0, -1),
            new KeyPoint( 95, 120, 22, -1, 31.5957f, 0, -1),
            new KeyPoint(105, 120, 22, -1, 31.5957f, 0, -1)
        };
    }

    public void testCreate() {
        assertNotNull(detector);
    }

    public void testDetectListOfMatListOfListOfKeyPoint() {
        fail("Not yet implemented");
    }

    public void testDetectListOfMatListOfListOfKeyPointListOfMat() {
        fail("Not yet implemented");
    }

    public void testDetectMatListOfKeyPoint() {
        Mat img = getTestImg();
        MatOfKeyPoint keypoints = new MatOfKeyPoint();

        detector.detect(img, keypoints);

        assertListKeyPointEquals(Arrays.asList(truth), keypoints.toList(), EPS);
    }

    public void testDetectMatListOfKeyPointMat() {
        Mat img = getTestImg();
        Mat mask = getMaskImg();
        MatOfKeyPoint keypoints = new MatOfKeyPoint();

        detector.detect(img, keypoints, mask);

        assertListKeyPointEquals(Arrays.asList(truth[0], truth[2], truth[5], truth[7]), keypoints.toList(), EPS);
    }

    public void testEmpty() {
        // assertFalse(detector.empty());
        fail("Not yet implemented");
    }

    public void testRead() {
        Mat img = getTestImg();

        MatOfKeyPoint keypoints1 = new MatOfKeyPoint();
        detector.detect(img, keypoints1);

        String filename = OpenCVTestRunner.getTempFileName("yml");
        writeFile(filename, "%YAML:1.0\n---\nmaxSize: 45\nresponseThreshold: 150\nlineThresholdProjected: 10\nlineThresholdBinarized: 8\nsuppressNonmaxSize: 5\n");
        detector.read(filename);

        MatOfKeyPoint keypoints2 = new MatOfKeyPoint();
        detector.detect(img, keypoints2);

        assertTrue(keypoints2.total() <= keypoints1.total());
    }

    public void testWrite() {
        String filename = OpenCVTestRunner.getTempFileName("xml");

        detector.write(filename);

        // String truth = "<?xml version=\"1.0\"?>\n<opencv_storage>\n<name>Feature2D.STAR</name>\n<lineThresholdBinarized>8</lineThresholdBinarized>\n<lineThresholdProjected>10</lineThresholdProjected>\n<maxSize>45</maxSize>\n<responseThreshold>30</responseThreshold>\n<suppressNonmaxSize>5</suppressNonmaxSize>\n</opencv_storage>\n";
        String truth = "<?xml version=\"1.0\"?>\n<opencv_storage>\n</opencv_storage>\n";
        assertEquals(truth, readFile(filename));
    }

    public void testWriteYml() {
        String filename = OpenCVTestRunner.getTempFileName("yml");

        detector.write(filename);

        // String truth = "%YAML:1.0\n---\nname: \"Feature2D.STAR\"\nlineThresholdBinarized: 8\nlineThresholdProjected: 10\nmaxSize: 45\nresponseThreshold: 30\nsuppressNonmaxSize: 5\n";
        String truth = "%YAML:1.0\n---\n";
        assertEquals(truth, readFile(filename));
    }

}
@@ -1,119 +0,0 @@ (deleted file: SURFDescriptorExtractorTest.java)
package org.opencv.test.features2d;

import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.MatOfKeyPoint;
import org.opencv.core.Point;
import org.opencv.core.Scalar;
import org.opencv.core.KeyPoint;
import org.opencv.test.OpenCVTestCase;
import org.opencv.test.OpenCVTestRunner;
import org.opencv.imgproc.Imgproc;
import org.opencv.features2d.Feature2D;

public class SURFDescriptorExtractorTest extends OpenCVTestCase {

    Feature2D extractor;
    int matSize;

    private Mat getTestImg() {
        Mat cross = new Mat(matSize, matSize, CvType.CV_8U, new Scalar(255));
        Imgproc.line(cross, new Point(20, matSize / 2), new Point(matSize - 21, matSize / 2), new Scalar(100), 2);
        Imgproc.line(cross, new Point(matSize / 2, 20), new Point(matSize / 2, matSize - 21), new Scalar(100), 2);

        return cross;
    }

    @Override
    protected void setUp() throws Exception {
        super.setUp();

        Class[] cParams = {double.class, int.class, int.class, boolean.class, boolean.class};
        Object[] oValues = {100, 2, 4, true, false};
        extractor = createClassInstance(XFEATURES2D+"SURF", DEFAULT_FACTORY, cParams, oValues);

        matSize = 100;
    }

    public void testComputeListOfMatListOfListOfKeyPointListOfMat() {
        fail("Not yet implemented");
    }

    public void testComputeMatListOfKeyPointMat() {
        KeyPoint point = new KeyPoint(55.775577545166016f, 44.224422454833984f, 16, 9.754629f, 8617.863f, 1, -1);
        MatOfKeyPoint keypoints = new MatOfKeyPoint(point);
        Mat img = getTestImg();
        Mat descriptors = new Mat();

        extractor.compute(img, keypoints, descriptors);

        Mat truth = new Mat(1, 128, CvType.CV_32FC1) {
            {
                put(0, 0,
                        0, 0, 0, 0, 0, 0, 0, 0, 0.058821894, 0.058821894, -0.045962855, 0.046261817, 0.0085156476,
                        0.0085754395, -0.0064509804, 0.0064509804, 0.00044069235, 0.00044069235, 0, 0, 0.00025723741,
                        0.00025723741, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.00025723741, 0.00025723741, -0.00044069235,
                        0.00044069235, 0, 0, 0.36278215, 0.36278215, -0.24688604, 0.26173124, 0.052068226, 0.052662034,
                        -0.032815345, 0.032815345, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -0.0064523756,
                        0.0064523756, 0.0082002236, 0.0088908644, -0.059001274, 0.059001274, 0.045789491, 0.04648013,
                        0.11961588, 0.22789426, -0.01322381, 0.18291828, -0.14042182, 0.23973691, 0.073782086, 0.23769434,
                        -0.027880307, 0.027880307, 0.049587864, 0.049587864, -0.33991757, 0.33991757, 0.21437603, 0.21437603,
                        -0.0020763327, 0.0020763327, 0.006245892, 0.006245892, -0.04067041, 0.04067041, 0.019361559,
                        0.019361559, 0, 0, -0.0035977389, 0.0035977389, 0, 0, -0.00099993451, 0.00099993451, 0.040670406,
                        0.040670406, -0.019361559, 0.019361559, 0.006245892, 0.006245892, -0.0020763327, 0.0020763327,
                        -0.00034532088, 0.00034532088, 0, 0, 0, 0, 0.00034532088, 0.00034532088, -0.00099993451,
                        0.00099993451, 0, 0, 0, 0, 0.0035977389, 0.0035977389
                );
            }
        };

        assertMatEqual(truth, descriptors, EPS);
    }

    public void testCreate() {
        assertNotNull(extractor);
    }

    public void testDescriptorSize() {
        assertEquals(128, extractor.descriptorSize());
    }

    public void testDescriptorType() {
        assertEquals(CvType.CV_32F, extractor.descriptorType());
    }

    public void testEmpty() {
        // assertFalse(extractor.empty());
        fail("Not yet implemented");
    }

    public void testRead() {
        String filename = OpenCVTestRunner.getTempFileName("yml");
        writeFile(filename, "%YAML:1.0\n---\nnOctaves: 4\nnOctaveLayers: 2\nextended: 1\nupright: 0\n");

        extractor.read(filename);

        assertEquals(128, extractor.descriptorSize());
    }

    public void testWrite() {
        String filename = OpenCVTestRunner.getTempFileName("xml");

        extractor.write(filename);

        // String truth = "<?xml version=\"1.0\"?>\n<opencv_storage>\n<name>Feature2D.SURF</name>\n<extended>1</extended>\n<hessianThreshold>100.</hessianThreshold>\n<nOctaveLayers>2</nOctaveLayers>\n<nOctaves>4</nOctaves>\n<upright>0</upright>\n</opencv_storage>\n";
        String truth = "<?xml version=\"1.0\"?>\n<opencv_storage>\n</opencv_storage>\n";
        assertEquals(truth, readFile(filename));
    }

    public void testWriteYml() {
        String filename = OpenCVTestRunner.getTempFileName("yml");

        extractor.write(filename);

        // String truth = "%YAML:1.0\n---\nname: \"Feature2D.SURF\"\nextended: 1\nhessianThreshold: 100.\nnOctaveLayers: 2\nnOctaves: 4\nupright: 0\n";
        String truth = "%YAML:1.0\n---\n";
        assertEquals(truth, readFile(filename));
    }

}
@@ -1,175 +0,0 @@
package org.opencv.test.features2d;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;

import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.MatOfKeyPoint;
import org.opencv.core.Point;
import org.opencv.core.Scalar;
import org.opencv.core.KeyPoint;
import org.opencv.test.OpenCVTestCase;
import org.opencv.test.OpenCVTestRunner;
import org.opencv.imgproc.Imgproc;
import org.opencv.features2d.Feature2D;

public class SURFFeatureDetectorTest extends OpenCVTestCase {

    Feature2D detector;
    int matSize;
    KeyPoint[] truth;

    private Mat getMaskImg() {
        Mat mask = new Mat(matSize, matSize, CvType.CV_8U, new Scalar(255));
        Mat right = mask.submat(0, matSize, matSize / 2, matSize);
        right.setTo(new Scalar(0));
        return mask;
    }

    private Mat getTestImg() {
        Mat cross = new Mat(matSize, matSize, CvType.CV_8U, new Scalar(255));
        Imgproc.line(cross, new Point(20, matSize / 2), new Point(matSize - 21, matSize / 2), new Scalar(100), 2);
        Imgproc.line(cross, new Point(matSize / 2, 20), new Point(matSize / 2, matSize - 21), new Scalar(100), 2);

        return cross;
    }

    private void order(List<KeyPoint> points) {
        Collections.sort(points, new Comparator<KeyPoint>() {
            public int compare(KeyPoint p1, KeyPoint p2) {
                if (p1.angle < p2.angle)
                    return -1;
                if (p1.angle > p2.angle)
                    return 1;
                return 0;
            }
        });
    }

    @Override
    protected void setUp() throws Exception {
        super.setUp();
        detector = createClassInstance(XFEATURES2D+"SURF", DEFAULT_FACTORY, null, null);
        matSize = 100;
        truth = new KeyPoint[] {
            new KeyPoint(55.775578f, 55.775578f, 16, 80.245735f, 8617.8633f, 0, -1),
            new KeyPoint(44.224422f, 55.775578f, 16, 170.24574f, 8617.8633f, 0, -1),
            new KeyPoint(44.224422f, 44.224422f, 16, 260.24573f, 8617.8633f, 0, -1),
            new KeyPoint(55.775578f, 44.224422f, 16, 350.24573f, 8617.8633f, 0, -1)
        };
    }

    public void testCreate() {
        assertNotNull(detector);
    }

    public void testDetectListOfMatListOfListOfKeyPoint() {

        setProperty(detector, "hessianThreshold", "double", 8000);
        setProperty(detector, "nOctaves", "int", 3);
        setProperty(detector, "nOctaveLayers", "int", 4);
        setProperty(detector, "upright", "boolean", false);

        List<MatOfKeyPoint> keypoints = new ArrayList<MatOfKeyPoint>();
        Mat cross = getTestImg();
        List<Mat> crosses = new ArrayList<Mat>(3);
        crosses.add(cross);
        crosses.add(cross);
        crosses.add(cross);

        detector.detect(crosses, keypoints);

        assertEquals(3, keypoints.size());

        for (MatOfKeyPoint mkp : keypoints) {
            List<KeyPoint> lkp = mkp.toList();
            order(lkp);
            assertListKeyPointEquals(Arrays.asList(truth), lkp, EPS);
        }
    }

    public void testDetectListOfMatListOfListOfKeyPointListOfMat() {
        fail("Not yet implemented");
    }

    public void testDetectMatListOfKeyPoint() {

        setProperty(detector, "hessianThreshold", "double", 8000);
        setProperty(detector, "nOctaves", "int", 3);
        setProperty(detector, "nOctaveLayers", "int", 4);
        setProperty(detector, "upright", "boolean", false);

        MatOfKeyPoint keypoints = new MatOfKeyPoint();
        Mat cross = getTestImg();

        detector.detect(cross, keypoints);

        List<KeyPoint> lkp = keypoints.toList();
        order(lkp);
        assertListKeyPointEquals(Arrays.asList(truth), lkp, EPS);
    }

    public void testDetectMatListOfKeyPointMat() {

        setProperty(detector, "hessianThreshold", "double", 8000);
        setProperty(detector, "nOctaves", "int", 3);
        setProperty(detector, "nOctaveLayers", "int", 4);
        setProperty(detector, "upright", "boolean", false);

        Mat img = getTestImg();
        Mat mask = getMaskImg();
        MatOfKeyPoint keypoints = new MatOfKeyPoint();

        detector.detect(img, keypoints, mask);

        List<KeyPoint> lkp = keypoints.toList();
        order(lkp);
        assertListKeyPointEquals(Arrays.asList(truth[1], truth[2]), lkp, EPS);
    }

    public void testEmpty() {
        // assertFalse(detector.empty());
        fail("Not yet implemented");
    }

    public void testRead() {
        Mat cross = getTestImg();

        MatOfKeyPoint keypoints1 = new MatOfKeyPoint();
        detector.detect(cross, keypoints1);

        String filename = OpenCVTestRunner.getTempFileName("yml");
        writeFile(filename, "%YAML:1.0\n---\nhessianThreshold: 8000.\noctaves: 3\noctaveLayers: 4\nupright: 0\n");
        detector.read(filename);

        MatOfKeyPoint keypoints2 = new MatOfKeyPoint();
        detector.detect(cross, keypoints2);

        assertTrue(keypoints2.total() <= keypoints1.total());
    }

    public void testWrite() {
        String filename = OpenCVTestRunner.getTempFileName("xml");

        detector.write(filename);

        // String truth = "<?xml version=\"1.0\"?>\n<opencv_storage>\n<name>Feature2D.SURF</name>\n<extended>0</extended>\n<hessianThreshold>100.</hessianThreshold>\n<nOctaveLayers>3</nOctaveLayers>\n<nOctaves>4</nOctaves>\n<upright>0</upright>\n</opencv_storage>\n";
        String truth = "<?xml version=\"1.0\"?>\n<opencv_storage>\n</opencv_storage>\n";
        assertEquals(truth, readFile(filename));
    }

    public void testWriteYml() {
        String filename = OpenCVTestRunner.getTempFileName("yml");

        detector.write(filename);

        // String truth = "%YAML:1.0\n---\nname: \"Feature2D.SURF\"\nextended: 0\nhessianThreshold: 100.\nnOctaveLayers: 3\nnOctaves: 4\nupright: 0\n";
        String truth = "%YAML:1.0\n---\n";
        assertEquals(truth, readFile(filename));
    }

}
@@ -1,4 +1,10 @@
 {
+    "ManualFuncs" : {
+        "SimpleBlobDetector": {
+            "setParams": { "declaration" : [""], "implementation" : [""] },
+            "getParams": { "declaration" : [""], "implementation" : [""] }
+        }
+    },
     "enum_fix" : {
         "FastFeatureDetector" : { "DetectorType": "FastDetectorType" },
         "AgastFeatureDetector" : { "DetectorType": "AgastDetectorType" }
@@ -7948,6 +7948,27 @@ public:
     : threshold(_threshold), nonmaxSuppression(_nonmaxSuppression), type(_type)
     {}
 
+    void read( const FileNode& fn) CV_OVERRIDE
+    {
+        // if node is empty, keep previous value
+        if (!fn["threshold"].empty())
+            fn["threshold"] >> threshold;
+        if (!fn["nonmaxSuppression"].empty())
+            fn["nonmaxSuppression"] >> nonmaxSuppression;
+        if (!fn["type"].empty())
+            fn["type"] >> type;
+    }
+    void write( FileStorage& fs) const CV_OVERRIDE
+    {
+        if(fs.isOpened())
+        {
+            fs << "name" << getDefaultName();
+            fs << "threshold" << threshold;
+            fs << "nonmaxSuppression" << nonmaxSuppression;
+            fs << "type" << type;
+        }
+    }
+
     void detect( InputArray _image, std::vector<KeyPoint>& keypoints, InputArray _mask ) CV_OVERRIDE
     {
         CV_INSTRUMENT_REGION();
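For context, the point of these new read()/write() overrides is that detector settings now survive a FileStorage round trip. A minimal sketch of such a round trip (not part of the patch; the use of FastFeatureDetector, the file name and the parameter values are illustrative assumptions):

    // Sketch: persist detector settings with write(), restore them with read().
    #include <opencv2/features2d.hpp>
    #include <iostream>

    int main()
    {
        cv::Ptr<cv::FastFeatureDetector> fast = cv::FastFeatureDetector::create(25, false);
        {
            cv::FileStorage fs("fast_params.yml", cv::FileStorage::WRITE);
            fast->write(fs);                 // stores name, threshold, nonmaxSuppression, type
        }
        cv::Ptr<cv::FastFeatureDetector> restored = cv::FastFeatureDetector::create();
        {
            cv::FileStorage fs("fast_params.yml", cv::FileStorage::READ);
            restored->read(fs.root());       // keys that are absent keep the current values
        }
        std::cout << restored->getThreshold() << std::endl;  // expected: 25
        return 0;
    }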
@@ -207,6 +207,7 @@ namespace cv
     void write(FileStorage& fs) const CV_OVERRIDE
     {
         writeFormat(fs);
+        fs << "name" << getDefaultName();
         fs << "descriptor" << descriptor;
         fs << "descriptor_channels" << descriptor_channels;
         fs << "descriptor_size" << descriptor_size;
@@ -218,13 +219,21 @@ namespace cv
 
     void read(const FileNode& fn) CV_OVERRIDE
     {
-        descriptor = static_cast<DescriptorType>((int)fn["descriptor"]);
-        descriptor_channels = (int)fn["descriptor_channels"];
-        descriptor_size = (int)fn["descriptor_size"];
-        threshold = (float)fn["threshold"];
-        octaves = (int)fn["octaves"];
-        sublevels = (int)fn["sublevels"];
-        diffusivity = static_cast<KAZE::DiffusivityType>((int)fn["diffusivity"]);
+        // if node is empty, keep previous value
+        if (!fn["descriptor"].empty())
+            descriptor = static_cast<DescriptorType>((int)fn["descriptor"]);
+        if (!fn["descriptor_channels"].empty())
+            descriptor_channels = (int)fn["descriptor_channels"];
+        if (!fn["descriptor_size"].empty())
+            descriptor_size = (int)fn["descriptor_size"];
+        if (!fn["threshold"].empty())
+            threshold = (float)fn["threshold"];
+        if (!fn["octaves"].empty())
+            octaves = (int)fn["octaves"];
+        if (!fn["sublevels"].empty())
+            sublevels = (int)fn["sublevels"];
+        if (!fn["diffusivity"].empty())
+            diffusivity = static_cast<KAZE::DiffusivityType>((int)fn["diffusivity"]);
     }
 
     DescriptorType descriptor;
@@ -71,6 +71,37 @@ public:
     virtual void read( const FileNode& fn ) CV_OVERRIDE;
     virtual void write( FileStorage& fs ) const CV_OVERRIDE;
 
+    void setParams(const SimpleBlobDetector::Params& _params ) CV_OVERRIDE {
+        SimpleBlobDetectorImpl::validateParameters(_params);
+        params = _params;
+    }
+
+    SimpleBlobDetector::Params getParams() const CV_OVERRIDE { return params; }
+
+    static void validateParameters(const SimpleBlobDetector::Params& p)
+    {
+        if (p.thresholdStep <= 0)
+            CV_Error(Error::StsBadArg, "thresholdStep>0");
+
+        if (p.minThreshold > p.maxThreshold || p.minThreshold <= 0)
+            CV_Error(Error::StsBadArg, "0<minThreshold<=maxThreshold");
+
+        if (p.minDistBetweenBlobs <=0 )
+            CV_Error(Error::StsBadArg, "minDistBetweenBlobs>0");
+
+        if (p.minArea > p.maxArea || p.minArea <=0)
+            CV_Error(Error::StsBadArg, "0<minArea<=maxArea");
+
+        if (p.minCircularity > p.maxCircularity || p.minCircularity <= 0)
+            CV_Error(Error::StsBadArg, "0<minCircularity<=maxCircularity");
+
+        if (p.minInertiaRatio > p.maxInertiaRatio || p.minInertiaRatio <= 0)
+            CV_Error(Error::StsBadArg, "0<minInertiaRatio<=maxInertiaRatio");
+
+        if (p.minConvexity > p.maxConvexity || p.minConvexity <= 0)
+            CV_Error(Error::StsBadArg, "0<minConvexity<=maxConvexity");
+    }
+
 protected:
     struct CV_EXPORTS Center
     {
@@ -192,7 +223,10 @@ params(parameters)
 
 void SimpleBlobDetectorImpl::read( const cv::FileNode& fn )
 {
-    params.read(fn);
+    SimpleBlobDetector::Params rp;
+    rp.read(fn);
+    SimpleBlobDetectorImpl::validateParameters(rp);
+    params = rp;
 }
 
 void SimpleBlobDetectorImpl::write( cv::FileStorage& fs ) const
@@ -455,6 +489,7 @@ const std::vector<std::vector<Point> >& SimpleBlobDetectorImpl::getBlobContours(
 
 Ptr<SimpleBlobDetector> SimpleBlobDetector::create(const SimpleBlobDetector::Params& params)
 {
+    SimpleBlobDetectorImpl::validateParameters(params);
     return makePtr<SimpleBlobDetectorImpl>(params);
 }
 
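With validateParameters() called from create(), read() and setParams(), inconsistent blob-detector settings are rejected up front instead of silently producing no detections. A minimal sketch of what that looks like from user code (not part of the patch; the chosen values are deliberately invalid for illustration):

    // Sketch: create() now throws cv::Exception for inconsistent Params.
    #include <opencv2/features2d.hpp>
    #include <iostream>

    int main()
    {
        cv::SimpleBlobDetector::Params p;
        p.minArea = 500.0f;
        p.maxArea = 100.0f;  // invalid on purpose: minArea > maxArea
        try
        {
            cv::Ptr<cv::SimpleBlobDetector> detector = cv::SimpleBlobDetector::create(p);
        }
        catch (const cv::Exception& e)
        {
            std::cout << "rejected: " << e.what() << std::endl;
        }
        return 0;
    }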
@@ -54,7 +54,7 @@ namespace cv
 class BRISK_Impl CV_FINAL : public BRISK
 {
 public:
-    explicit BRISK_Impl(int thresh=30, int octaves=3, float patternScale=1.0f);
+    explicit BRISK_Impl(int _threshold=30, int _octaves=3, float _patternScale=1.0f);
     // custom setup
     explicit BRISK_Impl(const std::vector<float> &radiusList, const std::vector<int> &numberList,
         float dMax=5.85f, float dMin=8.2f, const std::vector<int> indexChange=std::vector<int>());
@@ -65,6 +65,9 @@ public:
 
     virtual ~BRISK_Impl();
 
+    void read( const FileNode& fn) CV_OVERRIDE;
+    void write( FileStorage& fs) const CV_OVERRIDE;
+
     int descriptorSize() const CV_OVERRIDE
     {
         return strings_;
@@ -99,6 +102,35 @@ public:
     {
         return octaves;
     }
+    virtual void setPatternScale(float _patternScale) CV_OVERRIDE
+    {
+        patternScale = _patternScale;
+        std::vector<float> rList;
+        std::vector<int> nList;
+
+        // this is the standard pattern found to be suitable also
+        rList.resize(5);
+        nList.resize(5);
+        const double f = 0.85 * patternScale;
+
+        rList[0] = (float)(f * 0.);
+        rList[1] = (float)(f * 2.9);
+        rList[2] = (float)(f * 4.9);
+        rList[3] = (float)(f * 7.4);
+        rList[4] = (float)(f * 10.8);
+
+        nList[0] = 1;
+        nList[1] = 10;
+        nList[2] = 14;
+        nList[3] = 15;
+        nList[4] = 20;
+
+        generateKernel(rList, nList, (float)(5.85 * patternScale), (float)(8.2 * patternScale));
+    }
+    virtual float getPatternScale() const CV_OVERRIDE
+    {
+        return patternScale;
+    }
 
     // call this to generate the kernel:
     // circle of radius r (pixels), with n points;
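Since setPatternScale() regenerates the sampling kernel, the pattern can now be retuned on an existing BRISK instance instead of constructing a new one. A minimal sketch (not part of the patch; the values are illustrative):

    // Sketch: change the BRISK pattern scale after construction.
    #include <opencv2/features2d.hpp>
    #include <iostream>

    int main()
    {
        cv::Ptr<cv::BRISK> brisk = cv::BRISK::create(30, 3, 1.0f);
        brisk->setPatternScale(1.5f);   // rebuilds the sampling pattern for the new scale
        std::cout << brisk->getPatternScale() << std::endl;  // expected: 1.5
        return 0;
    }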
@@ -122,6 +154,7 @@ protected:
     // Feature parameters
     CV_PROP_RW int threshold;
     CV_PROP_RW int octaves;
+    CV_PROP_RW float patternScale;
 
     // some helper structures for the Brisk pattern representation
     struct BriskPatternPoint{
@@ -309,32 +342,12 @@ const float BriskScaleSpace::safetyFactor_ = 1.0f;
 const float BriskScaleSpace::basicSize_ = 12.0f;
 
 // constructors
-BRISK_Impl::BRISK_Impl(int thresh, int octaves_in, float patternScale)
+BRISK_Impl::BRISK_Impl(int _threshold, int _octaves, float _patternScale)
 {
-  threshold = thresh;
-  octaves = octaves_in;
-
-  std::vector<float> rList;
-  std::vector<int> nList;
-
-  // this is the standard pattern found to be suitable also
-  rList.resize(5);
-  nList.resize(5);
-  const double f = 0.85 * patternScale;
-
-  rList[0] = (float)(f * 0.);
-  rList[1] = (float)(f * 2.9);
-  rList[2] = (float)(f * 4.9);
-  rList[3] = (float)(f * 7.4);
-  rList[4] = (float)(f * 10.8);
-
-  nList[0] = 1;
-  nList[1] = 10;
-  nList[2] = 14;
-  nList[3] = 15;
-  nList[4] = 20;
-
-  generateKernel(rList, nList, (float)(5.85 * patternScale), (float)(8.2 * patternScale));
+  threshold = _threshold;
+  octaves = _octaves;
+
+  setPatternScale(_patternScale);
 }
 
 BRISK_Impl::BRISK_Impl(const std::vector<float> &radiusList,
@@ -359,6 +372,31 @@ BRISK_Impl::BRISK_Impl(int thresh,
   octaves = octaves_in;
 }
 
+void BRISK_Impl::read( const FileNode& fn)
+{
+    // if node is empty, keep previous value
+    if (!fn["threshold"].empty())
+        fn["threshold"] >> threshold;
+    if (!fn["octaves"].empty())
+        fn["octaves"] >> octaves;
+    if (!fn["patternScale"].empty())
+    {
+        float _patternScale;
+        fn["patternScale"] >> _patternScale;
+        setPatternScale(_patternScale);
+    }
+}
+void BRISK_Impl::write( FileStorage& fs) const
+{
+    if(fs.isOpened())
+    {
+        fs << "name" << getDefaultName();
+        fs << "threshold" << threshold;
+        fs << "octaves" << octaves;
+        fs << "patternScale" << patternScale;
+    }
+}
+
 void
 BRISK_Impl::generateKernel(const std::vector<float> &radiusList,
                            const std::vector<int> &numberList,
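The "if node is empty, keep previous value" convention used throughout these read() implementations means a partial node only overrides the keys it actually contains. A minimal sketch of that behaviour using an in-memory YAML string (not part of the patch; the string content and values are illustrative assumptions):

    // Sketch: a node with only patternScale leaves threshold and octaves untouched.
    #include <opencv2/features2d.hpp>
    #include <iostream>
    #include <string>

    int main()
    {
        const std::string yml = "%YAML:1.0\n---\npatternScale: 2.0\n";
        cv::FileStorage fs(yml, cv::FileStorage::READ | cv::FileStorage::MEMORY);

        cv::Ptr<cv::BRISK> brisk = cv::BRISK::create(60, 4);   // threshold=60, octaves=4
        brisk->read(fs.root());                                // only patternScale is overridden

        std::cout << brisk->getThreshold() << " " << brisk->getOctaves()
                  << " " << brisk->getPatternScale() << std::endl;  // expected: 60 4 2
        return 0;
    }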
@@ -539,6 +539,27 @@ public:
     : threshold(_threshold), nonmaxSuppression(_nonmaxSuppression), type(_type)
     {}
 
+    void read( const FileNode& fn) CV_OVERRIDE
+    {
+        // if node is empty, keep previous value
+        if (!fn["threshold"].empty())
+            fn["threshold"] >> threshold;
+        if (!fn["nonmaxSuppression"].empty())
+            fn["nonmaxSuppression"] >> nonmaxSuppression;
+        if (!fn["type"].empty())
+            fn["type"] >> type;
+    }
+    void write( FileStorage& fs) const CV_OVERRIDE
+    {
+        if(fs.isOpened())
+        {
+            fs << "name" << getDefaultName();
+            fs << "threshold" << threshold;
+            fs << "nonmaxSuppression" << nonmaxSuppression;
+            fs << "type" << type;
+        }
+    }
+
     void detect( InputArray _image, std::vector<KeyPoint>& keypoints, InputArray _mask ) CV_OVERRIDE
     {
         CV_INSTRUMENT_REGION();
@@ -55,6 +55,39 @@ public:
     {
     }
 
+    void read( const FileNode& fn) CV_OVERRIDE
+    {
+        // if node is empty, keep previous value
+        if (!fn["nfeatures"].empty())
+            fn["nfeatures"] >> nfeatures;
+        if (!fn["qualityLevel"].empty())
+            fn["qualityLevel"] >> qualityLevel;
+        if (!fn["minDistance"].empty())
+            fn["minDistance"] >> minDistance;
+        if (!fn["blockSize"].empty())
+            fn["blockSize"] >> blockSize;
+        if (!fn["gradSize"].empty())
+            fn["gradSize"] >> gradSize;
+        if (!fn["useHarrisDetector"].empty())
+            fn["useHarrisDetector"] >> useHarrisDetector;
+        if (!fn["k"].empty())
+            fn["k"] >> k;
+    }
+    void write( FileStorage& fs) const CV_OVERRIDE
+    {
+        if(fs.isOpened())
+        {
+            fs << "name" << getDefaultName();
+            fs << "nfeatures" << nfeatures;
+            fs << "qualityLevel" << qualityLevel;
+            fs << "minDistance" << minDistance;
+            fs << "blockSize" << blockSize;
+            fs << "gradSize" << gradSize;
+            fs << "useHarrisDetector" << useHarrisDetector;
+            fs << "k" << k;
+        }
+    }
+
     void setMaxFeatures(int maxFeatures) CV_OVERRIDE { nfeatures = maxFeatures; }
     int getMaxFeatures() const CV_OVERRIDE { return nfeatures; }
 
@@ -67,8 +100,8 @@ public:
     void setBlockSize(int blockSize_) CV_OVERRIDE { blockSize = blockSize_; }
     int getBlockSize() const CV_OVERRIDE { return blockSize; }
 
-    //void setGradientSize(int gradientSize_) { gradSize = gradientSize_; }
-    //int getGradientSize() { return gradSize; }
+    void setGradientSize(int gradientSize_) CV_OVERRIDE { gradSize = gradientSize_; }
+    int getGradientSize() CV_OVERRIDE { return gradSize; }
 
     void setHarrisDetector(bool val) CV_OVERRIDE { useHarrisDetector = val; }
     bool getHarrisDetector() const CV_OVERRIDE { return useHarrisDetector; }
@@ -163,6 +163,7 @@ namespace cv
     void write(FileStorage& fs) const CV_OVERRIDE
     {
         writeFormat(fs);
+        fs << "name" << getDefaultName();
         fs << "extended" << (int)extended;
         fs << "upright" << (int)upright;
         fs << "threshold" << threshold;
@@ -173,12 +174,19 @@ namespace cv
 
     void read(const FileNode& fn) CV_OVERRIDE
     {
-        extended = (int)fn["extended"] != 0;
-        upright = (int)fn["upright"] != 0;
-        threshold = (float)fn["threshold"];
-        octaves = (int)fn["octaves"];
-        sublevels = (int)fn["sublevels"];
-        diffusivity = static_cast<KAZE::DiffusivityType>((int)fn["diffusivity"]);
+        // if node is empty, keep previous value
+        if (!fn["extended"].empty())
+            extended = (int)fn["extended"] != 0;
+        if (!fn["upright"].empty())
+            upright = (int)fn["upright"] != 0;
+        if (!fn["threshold"].empty())
+            threshold = (float)fn["threshold"];
+        if (!fn["octaves"].empty())
+            octaves = (int)fn["octaves"];
+        if (!fn["sublevels"].empty())
+            sublevels = (int)fn["sublevels"];
+        if (!fn["diffusivity"].empty())
+            diffusivity = static_cast<KAZE::DiffusivityType>((int)fn["diffusivity"]);
     }
 
     bool extended;
@@ -87,6 +87,48 @@ public:
 
     virtual ~MSER_Impl() CV_OVERRIDE {}
 
+    void read( const FileNode& fn) CV_OVERRIDE
+    {
+        // if node is empty, keep previous value
+        if (!fn["delta"].empty())
+            fn["delta"] >> params.delta;
+        if (!fn["minArea"].empty())
+            fn["minArea"] >> params.minArea;
+        if (!fn["maxArea"].empty())
+            fn["maxArea"] >> params.maxArea;
+        if (!fn["maxVariation"].empty())
+            fn["maxVariation"] >> params.maxVariation;
+        if (!fn["minDiversity"].empty())
+            fn["minDiversity"] >> params.minDiversity;
+        if (!fn["maxEvolution"].empty())
+            fn["maxEvolution"] >> params.maxEvolution;
+        if (!fn["areaThreshold"].empty())
+            fn["areaThreshold"] >> params.areaThreshold;
+        if (!fn["minMargin"].empty())
+            fn["minMargin"] >> params.minMargin;
+        if (!fn["edgeBlurSize"].empty())
+            fn["edgeBlurSize"] >> params.edgeBlurSize;
+        if (!fn["pass2Only"].empty())
+            fn["pass2Only"] >> params.pass2Only;
+    }
+    void write( FileStorage& fs) const CV_OVERRIDE
+    {
+        if(fs.isOpened())
+        {
+            fs << "name" << getDefaultName();
+            fs << "delta" << params.delta;
+            fs << "minArea" << params.minArea;
+            fs << "maxArea" << params.maxArea;
+            fs << "maxVariation" << params.maxVariation;
+            fs << "minDiversity" << params.minDiversity;
+            fs << "maxEvolution" << params.maxEvolution;
+            fs << "areaThreshold" << params.areaThreshold;
+            fs << "minMargin" << params.minMargin;
+            fs << "edgeBlurSize" << params.edgeBlurSize;
+            fs << "pass2Only" << params.pass2Only;
+        }
+    }
+
     void setDelta(int delta) CV_OVERRIDE { params.delta = delta; }
     int getDelta() const CV_OVERRIDE { return params.delta; }
 
@@ -96,6 +138,24 @@ public:
     void setMaxArea(int maxArea) CV_OVERRIDE { params.maxArea = maxArea; }
     int getMaxArea() const CV_OVERRIDE { return params.maxArea; }
 
+    void setMaxVariation(double maxVariation) CV_OVERRIDE { params.maxVariation = maxVariation; }
+    double getMaxVariation() const CV_OVERRIDE { return params.maxVariation; }
+
+    void setMinDiversity(double minDiversity) CV_OVERRIDE { params.minDiversity = minDiversity; }
+    double getMinDiversity() const CV_OVERRIDE { return params.minDiversity; }
+
+    void setMaxEvolution(int maxEvolution) CV_OVERRIDE { params.maxEvolution = maxEvolution; }
+    int getMaxEvolution() const CV_OVERRIDE { return params.maxEvolution; }
+
+    void setAreaThreshold(double areaThreshold) CV_OVERRIDE { params.areaThreshold = areaThreshold; }
+    double getAreaThreshold() const CV_OVERRIDE { return params.areaThreshold; }
+
+    void setMinMargin(double min_margin) CV_OVERRIDE { params.minMargin = min_margin; }
+    double getMinMargin() const CV_OVERRIDE { return params.minMargin; }
+
+    void setEdgeBlurSize(int edge_blur_size) CV_OVERRIDE { params.edgeBlurSize = edge_blur_size; }
+    int getEdgeBlurSize() const CV_OVERRIDE { return params.edgeBlurSize; }
+
     void setPass2Only(bool f) CV_OVERRIDE { params.pass2Only = f; }
     bool getPass2Only() const CV_OVERRIDE { return params.pass2Only; }
 
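These newly exposed MSER parameters can now be adjusted on an existing detector rather than only at construction time. A minimal sketch (not part of the patch; the parameter values are illustrative):

    // Sketch: tune the MSER parameters exposed by this patch after create().
    #include <opencv2/features2d.hpp>
    #include <iostream>

    int main()
    {
        cv::Ptr<cv::MSER> mser = cv::MSER::create();
        mser->setMaxVariation(0.5);
        mser->setMinDiversity(0.33);
        mser->setEdgeBlurSize(7);
        std::cout << mser->getMaxVariation() << " " << mser->getEdgeBlurSize() << std::endl;
        return 0;
    }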
@@ -666,6 +666,9 @@ public:
         scoreType(_scoreType), patchSize(_patchSize), fastThreshold(_fastThreshold)
     {}
 
+    void read( const FileNode& fn) CV_OVERRIDE;
+    void write( FileStorage& fs) const CV_OVERRIDE;
+
     void setMaxFeatures(int maxFeatures) CV_OVERRIDE { nfeatures = maxFeatures; }
     int getMaxFeatures() const CV_OVERRIDE { return nfeatures; }
 
@@ -717,6 +720,45 @@ protected:
     int fastThreshold;
 };
 
+void ORB_Impl::read( const FileNode& fn)
+{
+    // if node is empty, keep previous value
+    if (!fn["nfeatures"].empty())
+        fn["nfeatures"] >> nfeatures;
+    if (!fn["scaleFactor"].empty())
+        fn["scaleFactor"] >> scaleFactor;
+    if (!fn["nlevels"].empty())
+        fn["nlevels"] >> nlevels;
+    if (!fn["edgeThreshold"].empty())
+        fn["edgeThreshold"] >> edgeThreshold;
+    if (!fn["firstLevel"].empty())
+        fn["firstLevel"] >> firstLevel;
+    if (!fn["wta_k"].empty())
+        fn["wta_k"] >> wta_k;
+    if (!fn["scoreType"].empty())
+        fn["scoreType"] >> scoreType;
+    if (!fn["patchSize"].empty())
+        fn["patchSize"] >> patchSize;
+    if (!fn["fastThreshold"].empty())
+        fn["fastThreshold"] >> fastThreshold;
+}
+void ORB_Impl::write( FileStorage& fs) const
+{
+    if(fs.isOpened())
+    {
+        fs << "name" << getDefaultName();
+        fs << "nfeatures" << nfeatures;
+        fs << "scaleFactor" << scaleFactor;
+        fs << "nlevels" << nlevels;
+        fs << "edgeThreshold" << edgeThreshold;
+        fs << "firstLevel" << firstLevel;
+        fs << "wta_k" << wta_k;
+        fs << "scoreType" << scoreType;
+        fs << "patchSize" << patchSize;
+        fs << "fastThreshold" << fastThreshold;
+    }
+}
+
 int ORB_Impl::descriptorSize() const
 {
     return kBytes;
@@ -111,6 +111,24 @@ public:
     void findScaleSpaceExtrema( const std::vector<Mat>& gauss_pyr, const std::vector<Mat>& dog_pyr,
                                 std::vector<KeyPoint>& keypoints ) const;
 
+    void read( const FileNode& fn) CV_OVERRIDE;
+    void write( FileStorage& fs) const CV_OVERRIDE;
+
+    void setNFeatures(int maxFeatures) CV_OVERRIDE { nfeatures = maxFeatures; }
+    int getNFeatures() const CV_OVERRIDE { return nfeatures; }
+
+    void setNOctaveLayers(int nOctaveLayers_) CV_OVERRIDE { nOctaveLayers = nOctaveLayers_; }
+    int getNOctaveLayers() const CV_OVERRIDE { return nOctaveLayers; }
+
+    void setContrastThreshold(double contrastThreshold_) CV_OVERRIDE { contrastThreshold = contrastThreshold_; }
+    double getContrastThreshold() const CV_OVERRIDE { return contrastThreshold; }
+
+    void setEdgeThreshold(double edgeThreshold_) CV_OVERRIDE { edgeThreshold = edgeThreshold_; }
+    double getEdgeThreshold() const CV_OVERRIDE { return edgeThreshold; }
+
+    void setSigma(double sigma_) CV_OVERRIDE { sigma = sigma_; }
+    double getSigma() const CV_OVERRIDE { return sigma; }
+
 protected:
     CV_PROP_RW int nfeatures;
     CV_PROP_RW int nOctaveLayers;
@@ -554,4 +572,34 @@ void SIFT_Impl::detectAndCompute(InputArray _image, InputArray _mask,
     }
 }
 
+void SIFT_Impl::read( const FileNode& fn)
+{
+    // if node is empty, keep previous value
+    if (!fn["nfeatures"].empty())
+        fn["nfeatures"] >> nfeatures;
+    if (!fn["nOctaveLayers"].empty())
+        fn["nOctaveLayers"] >> nOctaveLayers;
+    if (!fn["contrastThreshold"].empty())
+        fn["contrastThreshold"] >> contrastThreshold;
+    if (!fn["edgeThreshold"].empty())
+        fn["edgeThreshold"] >> edgeThreshold;
+    if (!fn["sigma"].empty())
+        fn["sigma"] >> sigma;
+    if (!fn["descriptorType"].empty())
+        fn["descriptorType"] >> descriptor_type;
+}
+void SIFT_Impl::write( FileStorage& fs) const
+{
+    if(fs.isOpened())
+    {
+        fs << "name" << getDefaultName();
+        fs << "nfeatures" << nfeatures;
+        fs << "nOctaveLayers" << nOctaveLayers;
+        fs << "contrastThreshold" << contrastThreshold;
+        fs << "edgeThreshold" << edgeThreshold;
+        fs << "sigma" << sigma;
+        fs << "descriptorType" << descriptor_type;
+    }
+}
+
 }
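Together with the new SIFT accessors declared above, these read()/write() definitions let SIFT settings be saved and restored like the other detectors. A minimal sketch (not part of the patch; the file name and parameter values are illustrative assumptions):

    // Sketch: SIFT settings round-trip through write()/read() and the new getters.
    #include <opencv2/features2d.hpp>
    #include <iostream>

    int main()
    {
        cv::Ptr<cv::SIFT> sift = cv::SIFT::create(500, 3, 0.09, 31, 1.2);
        {
            cv::FileStorage fs("sift_params.yml", cv::FileStorage::WRITE);
            sift->write(fs);
        }
        cv::Ptr<cv::SIFT> restored = cv::SIFT::create();
        {
            cv::FileStorage fs("sift_params.yml", cv::FileStorage::READ);
            restored->read(fs.root());
        }
        std::cout << restored->getNFeatures() << " " << restored->getSigma() << std::endl;  // expected: 500 1.2
        return 0;
    }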
@@ -68,11 +68,11 @@ class JavaParser:
         if os.path.isfile(path):
             if path.endswith("FeatureDetector.java"):
                 for prefix1 in ("", "Grid", "Pyramid", "Dynamic"):
-                    for prefix2 in ("FAST", "STAR", "MSER", "ORB", "SIFT", "SURF", "GFTT", "HARRIS", "SIMPLEBLOB", "DENSE"):
+                    for prefix2 in ("FAST", "STAR", "MSER", "ORB", "SIFT", "SURF", "GFTT", "HARRIS", "SIMPLEBLOB", "DENSE", "AKAZE", "KAZE", "BRISK", "AGAST"):
                         parser.parse_file(path,prefix1+prefix2)
             elif path.endswith("DescriptorExtractor.java"):
                 for prefix1 in ("", "Opponent"):
-                    for prefix2 in ("BRIEF", "ORB", "SIFT", "SURF"):
+                    for prefix2 in ("BRIEF", "ORB", "SIFT", "SURF", "AKAZE", "KAZE", "BEBLID", "DAISY", "FREAK", "LUCID", "LATCH"):
                         parser.parse_file(path,prefix1+prefix2)
             elif path.endswith("GenericDescriptorMatcher.java"):
                 for prefix in ("OneWay", "Fern"):