Merge remote-tracking branch 'upstream/3.4' into merge-3.4
commit d3ae175bca
@@ -1670,6 +1670,12 @@ static double cvCalibrateCamera2Internal( const CvMat* objectPoints,
         }
     }
 
+    Mat mask = cvarrToMat(solver.mask);
+    int nparams_nz = countNonZero(mask);
+    if (nparams_nz >= 2 * total)
+        CV_Error_(CV_StsBadArg,
+                  ("There should be less vars to optimize (having %d) than the number of residuals (%d = 2 per point)", nparams_nz, 2 * total));
+
     // 2. initialize extrinsic parameters
     for( i = 0, pos = 0; i < nimages; i++, pos += ni )
     {
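The guard added above is a counting argument: each of the `total` image points contributes two residuals (x and y), so the optimization only has more equations than unknowns when the number of non-fixed parameters stays below that. As a sketch (writing N for `total` and p for `nparams_nz`, my shorthand, not names from the patch):

    \[ p < 2N \quad\Longleftrightarrow\quad 2N - p > 0, \]

which is exactly the positivity that the variance denominator introduced further down relies on.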
@@ -1795,27 +1801,24 @@ static double cvCalibrateCamera2Internal( const CvMat* objectPoints,
         {
             if( stdDevs )
             {
-                Mat mask = cvarrToMat(solver.mask);
-                int nparams_nz = countNonZero(mask);
                 Mat JtJinv, JtJN;
                 JtJN.create(nparams_nz, nparams_nz, CV_64F);
                 subMatrix(cvarrToMat(_JtJ), JtJN, mask, mask);
                 completeSymm(JtJN, false);
                 cv::invert(JtJN, JtJinv, DECOMP_SVD);
                 //sigma2 is deviation of the noise
                 //see any papers about variance of the least squares estimator for
                 //detailed description of the variance estimation methods
-                double sigma2 = norm(allErrors, NORM_L2SQR) / (total - nparams_nz);
+                // an explanation of that denominator correction can be found here:
+                // R. Hartley, A. Zisserman, Multiple View Geometry in Computer Vision, 2004, section 5.1.3, page 134
+                // see the discussion for more details: https://github.com/opencv/opencv/pull/22992
+                int nErrors = 2 * total - nparams_nz;
+                double sigma2 = norm(allErrors, NORM_L2SQR) / nErrors;
                 Mat stdDevsM = cvarrToMat(stdDevs);
                 int j = 0;
                 for ( int s = 0; s < nparams; s++ )
                 {
-                    stdDevsM.at<double>(s) = mask.data[s] ? std::sqrt(JtJinv.at<double>(j,j) * sigma2) : 0.0;
                     if( mask.data[s] )
+                    {
+                        stdDevsM.at<double>(s) = std::sqrt(JtJinv.at<double>(j,j) * sigma2);
                         j++;
+                    }
+                    else
+                        stdDevsM.at<double>(s) = 0.;
                 }
             }
             break;
         }
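For reference, the statistic the corrected block computes is the standard unbiased noise-variance estimate for least squares, followed by the per-parameter standard deviations (a sketch in LaTeX, assuming r stacks the reprojection residuals in `allErrors`, N = `total`, p = `nparams_nz`, and J is the Jacobian behind `_JtJ`):

    \[
      \hat{\sigma}^2 = \frac{\lVert r \rVert_2^2}{2N - p},
      \qquad
      \operatorname{std}(\theta_k) = \sqrt{\hat{\sigma}^2\,\bigl[(J^\top J)^{-1}\bigr]_{kk}} .
    \]

The previous denominator, total - nparams_nz, counted one residual per point instead of two, which is what the Hartley-Zisserman reference in the comments corrects.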
@@ -1594,13 +1594,18 @@ void cv::internal::EstimateUncertainties(InputArrayOfArrays objectPoints, InputA
 
     Vec<double, 1> sigma_x;
     meanStdDev(ex.reshape(1, 1), noArray(), sigma_x);
-    sigma_x *= sqrt(2.0 * (double)ex.total()/(2.0 * (double)ex.total() - 1.0));
 
     Mat JJ2, ex3;
     ComputeJacobians(objectPoints, imagePoints, params, omc, Tc, check_cond, thresh_cond, JJ2, ex3);
 
     sqrt(JJ2.inv(), JJ2);
 
+    int nParams = JJ2.rows;
+    // an explanation of that denominator correction can be found here:
+    // R. Hartley, A. Zisserman, Multiple View Geometry in Computer Vision, 2004, section 5.1.3, page 134
+    // see the discussion for more details: https://github.com/opencv/opencv/pull/22992
+    sigma_x *= sqrt(2.0 * (double)ex.total()/(2.0 * (double)ex.total() - nParams));
+
     errors = 3 * sigma_x(0) * JJ2.diag();
     rms = sqrt(norm(ex, NORM_L2SQR)/ex.total());
 }
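The fisheye path applies the same correction to its scalar noise estimate (a sketch, with N = ex.total() taken as the number of residual pairs): the old factor sqrt(2N / (2N - 1)) effectively assumed a single estimated parameter, while the new one divides by the actual degrees of freedom,

    \[ \sigma_x \leftarrow \sigma_x \sqrt{\frac{2N}{2N - n_{\mathrm{Params}}}} . \]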
@@ -597,9 +597,9 @@ TEST_F(fisheyeTest, EstimateUncertainties)
     cv::internal::EstimateUncertainties(objectPoints, imagePoints, param, rvec, tvec,
                                         errors, err_std, thresh_cond, check_cond, rms);
 
-    EXPECT_MAT_NEAR(errors.f, cv::Vec2d(1.29837104202046, 1.31565641071524), 1e-10);
-    EXPECT_MAT_NEAR(errors.c, cv::Vec2d(0.890439368129246, 0.816096854937896), 1e-10);
-    EXPECT_MAT_NEAR(errors.k, cv::Vec4d(0.00516248605191506, 0.0168181467500934, 0.0213118690274604, 0.00916010877545648), 1e-10);
+    EXPECT_MAT_NEAR(errors.f, cv::Vec2d(1.34250246865020720, 1.36037536429654530), 1e-10);
+    EXPECT_MAT_NEAR(errors.c, cv::Vec2d(0.92070526160049848, 0.84383585812851514), 1e-10);
+    EXPECT_MAT_NEAR(errors.k, cv::Vec4d(0.0053379581373996041, 0.017389792901700545, 0.022036256089491224, 0.0094714594258908952), 1e-10);
     EXPECT_MAT_NEAR(err_std, cv::Vec2d(0.187475975266883, 0.185678953263995), 1e-10);
     CV_Assert(fabs(rms - 0.263782587133546) < 1e-10);
     CV_Assert(errors.alpha == 0);
@@ -275,7 +275,9 @@ namespace {
     void recordException(const cv::String& msg)
 #endif
     {
+#ifndef CV_THREAD_SANITIZER
         if (!hasException)
+#endif
         {
             cv::AutoLock lock(cv::getInitializationMutex());
             if (!hasException)
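The added #ifndef only removes the unsynchronized fast-path read when building under ThreadSanitizer, which would otherwise report the benign race; the locked re-check keeps this a conventional double-checked pattern. A minimal, self-contained sketch of that pattern (my own example with hypothetical names, not OpenCV code; only the CV_THREAD_SANITIZER macro name is taken from the patch):

    #include <mutex>
    #include <string>

    static std::mutex g_errMutex;
    static bool g_hasError = false;
    static std::string g_firstError;

    // record only the first error reported by any thread
    void recordFirstError(const std::string& msg)
    {
    #ifndef CV_THREAD_SANITIZER
        // unsynchronized fast path: racy but benign, compiled out under
        // TSan so the sanitizer report stays clean
        if (g_hasError)
            return;
    #endif
        std::lock_guard<std::mutex> lock(g_errMutex);
        if (!g_hasError)  // authoritative check under the lock
        {
            g_hasError = true;
            g_firstError = msg;
        }
    }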
modules/imgproc/src/drawing.cpp (59 changed lines, Normal file → Executable file)
@@ -63,7 +63,7 @@ CollectPolyEdges( Mat& img, const Point2l* v, int npts,
                   int shift, Point offset=Point() );
 
 static void
-FillEdgeCollection( Mat& img, std::vector<PolyEdge>& edges, const void* color );
+FillEdgeCollection( Mat& img, std::vector<PolyEdge>& edges, const void* color, int line_type);
 
 static void
 PolyLine( Mat& img, const Point2l* v, int npts, bool closed,
@@ -1049,7 +1049,7 @@ EllipseEx( Mat& img, Point2l center, Size2l axes,
         v.push_back(center);
         std::vector<PolyEdge> edges;
         CollectPolyEdges( img, &v[0], (int)v.size(), edges, color, line_type, XY_SHIFT );
-        FillEdgeCollection( img, edges, color );
+        FillEdgeCollection( img, edges, color, line_type );
     }
 }
 
@@ -1277,37 +1277,60 @@ CollectPolyEdges( Mat& img, const Point2l* v, int count, std::vector<PolyEdge>&
         pt1.x = (pt1.x + offset.x) << (XY_SHIFT - shift);
         pt1.y = (pt1.y + delta) >> shift;
 
-        if( line_type < cv::LINE_AA )
+        Point2l pt0c(pt0), pt1c(pt1);
+
+        if (line_type < cv::LINE_AA)
         {
             t0.y = pt0.y; t1.y = pt1.y;
             t0.x = (pt0.x + (XY_ONE >> 1)) >> XY_SHIFT;
             t1.x = (pt1.x + (XY_ONE >> 1)) >> XY_SHIFT;
-            Line( img, t0, t1, color, line_type );
+            Line(img, t0, t1, color, line_type);
+
+            // use clipped endpoints to create a more accurate PolyEdge
+            if ((unsigned)t0.x >= (unsigned)(img.cols) ||
+                (unsigned)t1.x >= (unsigned)(img.cols) ||
+                (unsigned)t0.y >= (unsigned)(img.rows) ||
+                (unsigned)t1.y >= (unsigned)(img.rows))
+            {
+                clipLine(img.size(), t0, t1);
+
+                if (t0.y != t1.y)
+                {
+                    pt0c.y = t0.y; pt1c.y = t1.y;
+                    pt0c.x = (int64)(t0.x) << XY_SHIFT;
+                    pt1c.x = (int64)(t1.x) << XY_SHIFT;
+                }
+            }
+            else
+            {
+                pt0c.x += XY_ONE >> 1;
+                pt1c.x += XY_ONE >> 1;
+            }
         }
         else
         {
             t0.x = pt0.x; t1.x = pt1.x;
             t0.y = pt0.y << XY_SHIFT;
             t1.y = pt1.y << XY_SHIFT;
-            LineAA( img, t0, t1, color );
+            LineAA(img, t0, t1, color);
         }
 
-        if( pt0.y == pt1.y )
+        if (pt0.y == pt1.y)
             continue;
 
-        if( pt0.y < pt1.y )
+        edge.dx = (pt1c.x - pt0c.x) / (pt1c.y - pt0c.y);
+        if (pt0.y < pt1.y)
         {
             edge.y0 = (int)(pt0.y);
             edge.y1 = (int)(pt1.y);
-            edge.x = pt0.x;
+            edge.x = pt0c.x + (pt0.y - pt0c.y) * edge.dx; // correct starting point for clipped lines
         }
         else
         {
             edge.y0 = (int)(pt1.y);
             edge.y1 = (int)(pt0.y);
-            edge.x = pt1.x;
+            edge.x = pt1c.x + (pt1.y - pt1c.y) * edge.dx; // correct starting point for clipped lines
         }
-        edge.dx = (pt1.x - pt0.x) / (pt1.y - pt0.y);
         edges.push_back(edge);
     }
 }
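The bookkeeping added to CollectPolyEdges is plain linear interpolation along the (possibly clipped) segment: the slope comes from the clipped fixed-point endpoints, and the fill start is re-evaluated at the unclipped starting row (a sketch; pt0c/pt1c are the clipped endpoints from the patch and XY_SHIFT is the 16-bit fixed-point shift):

    \[
      \frac{\Delta x}{\Delta y} = \frac{x_{1c} - x_{0c}}{y_{1c} - y_{0c}},
      \qquad
      x(y_0) = x_{0c} + (y_0 - y_{0c})\,\frac{\Delta x}{\Delta y},
    \]

which is what the edge.dx and edge.x assignments compute for edges whose endpoints were pulled back inside the image.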
@@ -1324,7 +1347,7 @@ struct CmpEdges
 /**************** helper macros and functions for sequence/contour processing ***********/
 
 static void
-FillEdgeCollection( Mat& img, std::vector<PolyEdge>& edges, const void* color )
+FillEdgeCollection( Mat& img, std::vector<PolyEdge>& edges, const void* color, int line_type)
 {
     PolyEdge tmp;
     int i, y, total = (int)edges.size();
@@ -1333,6 +1356,12 @@ FillEdgeCollection( Mat& img, std::vector<PolyEdge>& edges, const void* color )
     int y_max = INT_MIN, y_min = INT_MAX;
     int64 x_max = 0xFFFFFFFFFFFFFFFF, x_min = 0x7FFFFFFFFFFFFFFF;
     int pix_size = (int)img.elemSize();
+    int delta;
+
+    if (line_type < CV_AA)
+        delta = 0;
+    else
+        delta = XY_ONE - 1;
 
     if( total < 2 )
         return;
@@ -1411,12 +1440,12 @@ FillEdgeCollection( Mat& img, std::vector<PolyEdge>& edges, const void* color )
 
             if (keep_prelast->x > prelast->x)
             {
-                x1 = (int)((prelast->x + XY_ONE - 1) >> XY_SHIFT);
+                x1 = (int)((prelast->x + delta) >> XY_SHIFT);
                 x2 = (int)(keep_prelast->x >> XY_SHIFT);
             }
             else
             {
-                x1 = (int)((keep_prelast->x + XY_ONE - 1) >> XY_SHIFT);
+                x1 = (int)((keep_prelast->x + delta) >> XY_SHIFT);
                 x2 = (int)(prelast->x >> XY_SHIFT);
             }
 
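The new delta switches the fixed-point to pixel conversion of the left fill boundary between ceiling and floor: with XY_SHIFT = 16, (x + delta) >> XY_SHIFT gives (a sketch)

    \[
      x_1 = \left\lfloor \frac{x + \delta}{2^{16}} \right\rfloor =
      \begin{cases}
        \lceil x / 2^{16} \rceil, & \delta = 2^{16} - 1 \ (\text{LINE\_AA, the old behaviour}),\\
        \lfloor x / 2^{16} \rfloor, & \delta = 0 \ (\text{LINE\_4 / LINE\_8}),
      \end{cases}
    \]

so non-anti-aliased fills no longer round the boundary up and stay consistent with the Line() rasterisation used for the outline.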
@@ -2017,7 +2046,7 @@ void fillPoly( InputOutputArray _img, const Point** pts, const int* npts, int nc
         CollectPolyEdges(img, _pts.data(), npts[i], edges, buf, line_type, shift, offset);
     }
 
-    FillEdgeCollection(img, edges, buf);
+    FillEdgeCollection(img, edges, buf, line_type);
 }
 
 void polylines( InputOutputArray _img, const Point* const* pts, const int* npts, int ncontours, bool isClosed,
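A short usage sketch of the public entry point whose behaviour this changes (my own example; the polygon and image sizes are arbitrary): the lineType passed to cv::fillPoly is now also forwarded to FillEdgeCollection, so it affects how the filled boundary is rounded, not just how the outline is drawn.

    #include <opencv2/imgproc.hpp>
    #include <vector>

    int main()
    {
        cv::Mat img = cv::Mat::zeros(256, 256, CV_8UC1);

        // a triangle with one vertex outside the image, similar to the new test
        std::vector<std::vector<cv::Point>> polys = {
            { cv::Point(100, -50), cv::Point(15, 241), cv::Point(15, 15) } };

        // LINE_8 here now also controls the fill boundary rounding
        cv::fillPoly(img, polys, cv::Scalar(255), cv::LINE_8);
        return 0;
    }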
@@ -2672,7 +2701,7 @@ cvDrawContours( void* _img, CvSeq* contour,
     }
 
     if( thickness < 0 )
-        cv::FillEdgeCollection( img, edges, ext_buf );
+        cv::FillEdgeCollection( img, edges, ext_buf, line_type);
 
     if( h_next && contour0 )
         contour0->h_next = h_next;
@@ -680,4 +680,237 @@ TEST(Drawing, fillpoly_circle)
    EXPECT_LT(diff_fp3, 1.);
}

TEST(Drawing, fillpoly_fully)
{
    unsigned imageWidth = 256;
    unsigned imageHeight = 256;
    int type = CV_8UC1;
    int shift = 0;
    Point offset(0, 0);
    cv::LineTypes lineType = LINE_4;

    int imageSizeOffset = 15;

    cv::Mat img(imageHeight, imageWidth, type);
    img = 0;

    std::vector<cv::Point> polygonPoints;
    polygonPoints.push_back(cv::Point(100, -50));
    polygonPoints.push_back(cv::Point(imageSizeOffset, imageHeight - imageSizeOffset));
    polygonPoints.push_back(cv::Point(imageSizeOffset, imageSizeOffset));

    // convert data
    std::vector<const cv::Point*> polygonPointPointers(polygonPoints.size());
    for (size_t i = 0; i < polygonPoints.size(); i++)
    {
        polygonPointPointers[i] = &polygonPoints[i];
    }

    const cv::Point** data = &polygonPointPointers.front();
    int size = (int)polygonPoints.size();
    const int* npts = &size;
    int ncontours = 1;

    // generate image
    cv::fillPoly(img, data, npts, ncontours, 255, lineType, shift, offset);

    // check for artifacts
    {
        cv::Mat binary = img < 128;
        cv::Mat labelImage(binary.size(), CV_32S);
        cv::Mat labelCentroids;
        int labels = cv::connectedComponents(binary, labelImage, 4);
        EXPECT_EQ(2, labels) << "artifacts occured";
    }

    // check if filling went over border
    {
        int xy_shift = 16, delta = offset.y + ((1 << shift) >> 1);
        int xy_one = 1 << xy_shift;

        Point pt0(polygonPoints[polygonPoints.size() - 1]), pt1;
        for (size_t i = 0; i < polygonPoints.size(); i++, pt0 = pt1)
        {
            pt1 = polygonPoints[i];

            // offset/shift treated like in fillPoly
            Point t0(pt0), t1(pt1);

            t0.x = (t0.x + offset.x) << (xy_shift - shift);
            t0.y = (t0.y + delta) >> shift;

            t1.x = (t1.x + offset.x) << (xy_shift - shift);
            t1.y = (t1.y + delta) >> shift;

            if (lineType < CV_AA)
            {
                t0.x = (t0.x + (xy_one >> 1)) >> xy_shift;
                t1.x = (t1.x + (xy_one >> 1)) >> xy_shift;

                // LINE_4 to use the same type of line which is used in fillPoly
                line(img, t0, t1, 0, 1, LINE_4, 0);
            }
            else
            {
                t0.x >>= (xy_shift);
                t1.x >>= (xy_shift);
                line(img, t0, t1, 0, 1, lineType, 0);
            }

        }
        cv::Mat binary = img < 254;
        cv::Mat labelImage(binary.size(), CV_32S);
        int labels = cv::connectedComponents(binary, labelImage, 4);
        EXPECT_EQ(2, labels) << "filling went over the border";
    }
}

PARAM_TEST_CASE(FillPolyFully, unsigned, unsigned, int, int, Point, cv::LineTypes)
{
    unsigned imageWidth;
    unsigned imageHeight;
    int type;
    int shift;
    Point offset;
    cv::LineTypes lineType;

    virtual void SetUp()
    {
        imageWidth = GET_PARAM(0);
        imageHeight = GET_PARAM(1);
        type = GET_PARAM(2);
        shift = GET_PARAM(3);
        offset = GET_PARAM(4);
        lineType = GET_PARAM(5);
    }

    void draw_polygon(cv::Mat& img, const std::vector<cv::Point>& polygonPoints)
    {
        // convert data
        std::vector<const cv::Point*> polygonPointPointers(polygonPoints.size());
        for (size_t i = 0; i < polygonPoints.size(); i++)
        {
            polygonPointPointers[i] = &polygonPoints[i];
        }

        const cv::Point** data = &polygonPointPointers.front();
        int size = (int)polygonPoints.size();
        const int* npts = &size;
        int ncontours = 1;

        // generate image
        cv::fillPoly(img, data, npts, ncontours, 255, lineType, shift, offset);
    }

    void check_artifacts(cv::Mat& img)
    {
        // check for artifacts
        cv::Mat binary = img < 128;
        cv::Mat labelImage(binary.size(), CV_32S);
        cv::Mat labelCentroids;
        int labels = cv::connectedComponents(binary, labelImage, 4);
        EXPECT_EQ(2, labels) << "artifacts occured";
    }

    void check_filling_over_border(cv::Mat& img, const std::vector<cv::Point>& polygonPoints)
    {
        int xy_shift = 16, delta = offset.y + ((1 << shift) >> 1);
        int xy_one = 1 << xy_shift;

        Point pt0(polygonPoints[polygonPoints.size() - 1]), pt1;
        for (size_t i = 0; i < polygonPoints.size(); i++, pt0 = pt1)
        {
            pt1 = polygonPoints[i];

            // offset/shift treated like in fillPoly
            Point t0(pt0), t1(pt1);

            t0.x = (t0.x + offset.x) << (xy_shift - shift);
            t0.y = (t0.y + delta) >> shift;

            t1.x = (t1.x + offset.x) << (xy_shift - shift);
            t1.y = (t1.y + delta) >> shift;

            if (lineType < CV_AA)
            {
                t0.x = (t0.x + (xy_one >> 1)) >> xy_shift;
                t1.x = (t1.x + (xy_one >> 1)) >> xy_shift;

                // LINE_4 to use the same type of line which is used in fillPoly
                line(img, t0, t1, 0, 1, LINE_4, 0);
            }
            else
            {
                t0.x >>= (xy_shift);
                t1.x >>= (xy_shift);
                line(img, t0, t1, 0, 1, lineType, 0);
            }

        }
        cv::Mat binary = img < 254;
        cv::Mat labelImage(binary.size(), CV_32S);
        int labels = cv::connectedComponents(binary, labelImage, 4);
        EXPECT_EQ(2, labels) << "filling went over the border";
    }

    void run_test(const std::vector<cv::Point>& polygonPoints)
    {
        cv::Mat img(imageHeight, imageWidth, type);
        img = 0;

        draw_polygon(img, polygonPoints);
        check_artifacts(img);
        check_filling_over_border(img, polygonPoints);
    }
};

TEST_P(FillPolyFully, DISABLED_fillpoly_fully)
{
    int imageSizeOffset = 15;

    // testing for polygon with straight edge at left/right side
    int positions1[2] = { imageSizeOffset, (int)imageWidth - imageSizeOffset };
    for (size_t i = 0; i < 2; i++)
    {
        for (int y = imageHeight + 50; y > -50; y -= 1)
        {
            // define polygon
            std::vector<cv::Point> polygonPoints;
            polygonPoints.push_back(cv::Point(100, imageHeight - y));
            polygonPoints.push_back(cv::Point(positions1[i], positions1[1]));
            polygonPoints.push_back(cv::Point(positions1[i], positions1[0]));

            run_test(polygonPoints);
        }
    }

    // testing for polygon with straight edge at top/bottom side
    int positions2[2] = { imageSizeOffset, (int)imageHeight - imageSizeOffset };
    for (size_t i = 0; i < 2; i++)
    {
        for (int x = imageWidth + 50; x > -50; x -= 1)
        {
            // define polygon
            std::vector<cv::Point> polygonPoints;
            polygonPoints.push_back(cv::Point(imageWidth - x, 100));
            polygonPoints.push_back(cv::Point(positions2[1], positions2[i]));
            polygonPoints.push_back(cv::Point(positions2[0], positions2[i]));

            run_test(polygonPoints);
        }
    }
}

INSTANTIATE_TEST_CASE_P(
    FillPolyTest, FillPolyFully,
    testing::Combine(
        testing::Values(256),
        testing::Values(256),
        testing::Values(CV_8UC1),
        testing::Values(0, 1, 2),
        testing::Values(cv::Point(0, 0), cv::Point(10, 10)),
        testing::Values(LINE_4, LINE_8, LINE_AA)
    )
);

}} // namespace