Boring changes - calib3d.

Roman Donchenko 2013-08-13 17:03:56 +04:00
parent 711fb6bd92
commit bd70a033fc
12 changed files with 96 additions and 95 deletions
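
All twelve files change in the same mechanical way: every place that bound a raw pointer to a cv::Ptr by plain assignment now takes ownership explicitly, through the Ptr constructor, reset(), or makePtr<>(). A minimal sketch of the before/after pattern (illustrative only; MyCallback is a hypothetical placeholder and the post-rework cv::Ptr API is assumed, not taken from this commit):

// Sketch of the migration pattern, not code from the commit itself.
#include <opencv2/core.hpp>       // cv::Ptr, cv::makePtr (assumed post-rework headers)
#include <opencv2/core/core_c.h>  // CvMat, cvCreateMat (C API)

struct MyCallback {};             // hypothetical stand-in for an estimator callback

void sketch()
{
    // Old style: raw pointer bound by plain assignment.
    //   cv::Ptr<CvMat> m = cvCreateMat(3, 3, CV_64F);
    //   cv::Ptr<MyCallback> cb = new MyCallback;

    // New style, as used throughout the hunks below:
    cv::Ptr<CvMat> m(cvCreateMat(3, 3, CV_64F));         // take ownership in the constructor
    m.reset(cvCreateMat(3, 3, CV_64F));                  // or rebind an existing Ptr
    cv::Ptr<MyCallback> cb = cv::makePtr<MyCallback>();  // or allocate through makePtr
    (void)cb;                                            // silence unused-variable warning
}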

View File

@@ -515,7 +515,7 @@ findCirclesGrid
-------------------
Finds centers in the grid of circles.
.. ocv:function:: bool findCirclesGrid( InputArray image, Size patternSize, OutputArray centers, int flags=CALIB_CB_SYMMETRIC_GRID, const Ptr<FeatureDetector> &blobDetector = new SimpleBlobDetector() )
.. ocv:function:: bool findCirclesGrid( InputArray image, Size patternSize, OutputArray centers, int flags=CALIB_CB_SYMMETRIC_GRID, const Ptr<FeatureDetector> &blobDetector = makePtr<SimpleBlobDetector>() )
.. ocv:pyfunction:: cv2.findCirclesGrid(image, patternSize[, centers[, flags[, blobDetector]]]) -> retval, centers

View File

@@ -180,7 +180,7 @@ CV_EXPORTS_W void drawChessboardCorners( InputOutputArray image, Size patternSiz
//! finds circles' grid pattern of the specified size in the image
CV_EXPORTS_W bool findCirclesGrid( InputArray image, Size patternSize,
OutputArray centers, int flags = CALIB_CB_SYMMETRIC_GRID,
const Ptr<FeatureDetector> &blobDetector = new SimpleBlobDetector());
const Ptr<FeatureDetector> &blobDetector = makePtr<SimpleBlobDetector>());
//! finds intrinsic and extrinsic camera parameters from several views of a known calibration pattern.
CV_EXPORTS_W double calibrateCamera( InputArrayOfArrays objectPoints,

View File

@@ -271,8 +271,8 @@ int cvFindChessboardCorners( const void* arr, CvSize pattern_size,
if( !out_corners )
CV_Error( CV_StsNullPtr, "Null pointer to corners" );
storage = cvCreateMemStorage(0);
thresh_img = cvCreateMat( img->rows, img->cols, CV_8UC1 );
storage.reset(cvCreateMemStorage(0));
thresh_img.reset(cvCreateMat( img->rows, img->cols, CV_8UC1 ));
#ifdef DEBUG_CHESSBOARD
dbg_img = cvCreateImage(cvGetSize(img), IPL_DEPTH_8U, 3 );
@@ -284,7 +284,7 @@ int cvFindChessboardCorners( const void* arr, CvSize pattern_size,
{
// equalize the input image histogram -
// that should make the contrast between "black" and "white" areas big enough
norm_img = cvCreateMat( img->rows, img->cols, CV_8UC1 );
norm_img.reset(cvCreateMat( img->rows, img->cols, CV_8UC1 ));
if( CV_MAT_CN(img->type) != 1 )
{
@@ -541,12 +541,12 @@ int cvFindChessboardCorners( const void* arr, CvSize pattern_size,
cv::Ptr<CvMat> gray;
if( CV_MAT_CN(img->type) != 1 )
{
gray = cvCreateMat(img->rows, img->cols, CV_8UC1);
gray.reset(cvCreateMat(img->rows, img->cols, CV_8UC1));
cvCvtColor(img, gray, CV_BGR2GRAY);
}
else
{
gray = cvCloneMat(img);
gray.reset(cvCloneMat(img));
}
int wsize = 2;
cvFindCornerSubPix( gray, out_corners, pattern_size.width*pattern_size.height,
@@ -627,7 +627,7 @@ icvOrderFoundConnectedQuads( int quad_count, CvCBQuad **quads,
int *all_count, CvCBQuad **all_quads, CvCBCorner **corners,
CvSize pattern_size, CvMemStorage* storage )
{
cv::Ptr<CvMemStorage> temp_storage = cvCreateChildMemStorage( storage );
cv::Ptr<CvMemStorage> temp_storage(cvCreateChildMemStorage( storage ));
CvSeq* stack = cvCreateSeq( 0, sizeof(*stack), sizeof(void*), temp_storage );
// first find an interior quad
@@ -1109,7 +1109,7 @@ icvCleanFoundConnectedQuads( int quad_count, CvCBQuad **quad_group, CvSize patte
// create an array of quadrangle centers
cv::AutoBuffer<CvPoint2D32f> centers( quad_count );
cv::Ptr<CvMemStorage> temp_storage = cvCreateMemStorage(0);
cv::Ptr<CvMemStorage> temp_storage(cvCreateMemStorage(0));
for( i = 0; i < quad_count; i++ )
{
@@ -1205,7 +1205,7 @@ static int
icvFindConnectedQuads( CvCBQuad *quad, int quad_count, CvCBQuad **out_group,
int group_idx, CvMemStorage* storage )
{
cv::Ptr<CvMemStorage> temp_storage = cvCreateChildMemStorage( storage );
cv::Ptr<CvMemStorage> temp_storage(cvCreateChildMemStorage( storage ));
CvSeq* stack = cvCreateSeq( 0, sizeof(*stack), sizeof(void*), temp_storage );
int i, count = 0;
@@ -1674,7 +1674,7 @@ icvGenerateQuads( CvCBQuad **out_quads, CvCBCorner **out_corners,
min_size = 25; //cvRound( image->cols * image->rows * .03 * 0.01 * 0.92 );
// create temporary storage for contours and the sequence of pointers to found quadrangles
temp_storage = cvCreateChildMemStorage( storage );
temp_storage.reset(cvCreateChildMemStorage( storage ));
root = cvCreateSeq( 0, sizeof(CvSeq), sizeof(CvSeq*), temp_storage );
// initialize contour retrieving routine

View File

@@ -568,7 +568,7 @@ CV_IMPL void cvProjectPoints2( const CvMat* objectPoints,
(objectPoints->rows == count && CV_MAT_CN(objectPoints->type)*objectPoints->cols == 3) ||
(objectPoints->rows == 3 && CV_MAT_CN(objectPoints->type) == 1 && objectPoints->cols == count)))
{
matM = cvCreateMat( objectPoints->rows, objectPoints->cols, CV_MAKETYPE(CV_64F,CV_MAT_CN(objectPoints->type)) );
matM.reset(cvCreateMat( objectPoints->rows, objectPoints->cols, CV_MAKETYPE(CV_64F,CV_MAT_CN(objectPoints->type)) ));
cvConvert(objectPoints, matM);
}
else
@@ -584,7 +584,7 @@ CV_IMPL void cvProjectPoints2( const CvMat* objectPoints,
(imagePoints->rows == count && CV_MAT_CN(imagePoints->type)*imagePoints->cols == 2) ||
(imagePoints->rows == 2 && CV_MAT_CN(imagePoints->type) == 1 && imagePoints->cols == count)))
{
_m = cvCreateMat( imagePoints->rows, imagePoints->cols, CV_MAKETYPE(CV_64F,CV_MAT_CN(imagePoints->type)) );
_m.reset(cvCreateMat( imagePoints->rows, imagePoints->cols, CV_MAKETYPE(CV_64F,CV_MAT_CN(imagePoints->type)) ));
cvConvert(imagePoints, _m);
}
else
@@ -664,10 +664,10 @@ CV_IMPL void cvProjectPoints2( const CvMat* objectPoints,
if( CV_MAT_TYPE(dpdr->type) == CV_64FC1 )
{
_dpdr = cvCloneMat(dpdr);
_dpdr.reset(cvCloneMat(dpdr));
}
else
_dpdr = cvCreateMat( 2*count, 3, CV_64FC1 );
_dpdr.reset(cvCreateMat( 2*count, 3, CV_64FC1 ));
dpdr_p = _dpdr->data.db;
dpdr_step = _dpdr->step/sizeof(dpdr_p[0]);
}
@@ -682,10 +682,10 @@ CV_IMPL void cvProjectPoints2( const CvMat* objectPoints,
if( CV_MAT_TYPE(dpdt->type) == CV_64FC1 )
{
_dpdt = cvCloneMat(dpdt);
_dpdt.reset(cvCloneMat(dpdt));
}
else
_dpdt = cvCreateMat( 2*count, 3, CV_64FC1 );
_dpdt.reset(cvCreateMat( 2*count, 3, CV_64FC1 ));
dpdt_p = _dpdt->data.db;
dpdt_step = _dpdt->step/sizeof(dpdt_p[0]);
}
@@ -699,10 +699,10 @@ CV_IMPL void cvProjectPoints2( const CvMat* objectPoints,
if( CV_MAT_TYPE(dpdf->type) == CV_64FC1 )
{
_dpdf = cvCloneMat(dpdf);
_dpdf.reset(cvCloneMat(dpdf));
}
else
_dpdf = cvCreateMat( 2*count, 2, CV_64FC1 );
_dpdf.reset(cvCreateMat( 2*count, 2, CV_64FC1 ));
dpdf_p = _dpdf->data.db;
dpdf_step = _dpdf->step/sizeof(dpdf_p[0]);
}
@@ -716,10 +716,10 @@ CV_IMPL void cvProjectPoints2( const CvMat* objectPoints,
if( CV_MAT_TYPE(dpdc->type) == CV_64FC1 )
{
_dpdc = cvCloneMat(dpdc);
_dpdc.reset(cvCloneMat(dpdc));
}
else
_dpdc = cvCreateMat( 2*count, 2, CV_64FC1 );
_dpdc.reset(cvCreateMat( 2*count, 2, CV_64FC1 ));
dpdc_p = _dpdc->data.db;
dpdc_step = _dpdc->step/sizeof(dpdc_p[0]);
}
@@ -736,10 +736,10 @@ CV_IMPL void cvProjectPoints2( const CvMat* objectPoints,
if( CV_MAT_TYPE(dpdk->type) == CV_64FC1 )
{
_dpdk = cvCloneMat(dpdk);
_dpdk.reset(cvCloneMat(dpdk));
}
else
_dpdk = cvCreateMat( dpdk->rows, dpdk->cols, CV_64FC1 );
_dpdk.reset(cvCreateMat( dpdk->rows, dpdk->cols, CV_64FC1 ));
dpdk_p = _dpdk->data.db;
dpdk_step = _dpdk->step/sizeof(dpdk_p[0]);
}
@@ -950,8 +950,8 @@ CV_IMPL void cvFindExtrinsicCameraParams2( const CvMat* objectPoints,
CV_IS_MAT(A) && CV_IS_MAT(rvec) && CV_IS_MAT(tvec) );
count = MAX(objectPoints->cols, objectPoints->rows);
matM = cvCreateMat( 1, count, CV_64FC3 );
_m = cvCreateMat( 1, count, CV_64FC2 );
matM.reset(cvCreateMat( 1, count, CV_64FC3 ));
_m.reset(cvCreateMat( 1, count, CV_64FC2 ));
cvConvertPointsHomogeneous( objectPoints, matM );
cvConvertPointsHomogeneous( imagePoints, _m );
@@ -963,8 +963,8 @@ CV_IMPL void cvFindExtrinsicCameraParams2( const CvMat* objectPoints,
CV_Assert( (CV_MAT_DEPTH(tvec->type) == CV_64F || CV_MAT_DEPTH(tvec->type) == CV_32F) &&
(tvec->rows == 1 || tvec->cols == 1) && tvec->rows*tvec->cols*CV_MAT_CN(tvec->type) == 3 );
_mn = cvCreateMat( 1, count, CV_64FC2 );
_Mxy = cvCreateMat( 1, count, CV_64FC2 );
_mn.reset(cvCreateMat( 1, count, CV_64FC2 ));
_Mxy.reset(cvCreateMat( 1, count, CV_64FC2 ));
// normalize image points
// (unapply the intrinsic matrix transformation and distortion)
@@ -1055,7 +1055,7 @@ CV_IMPL void cvFindExtrinsicCameraParams2( const CvMat* objectPoints,
CvPoint3D64f* M = (CvPoint3D64f*)matM->data.db;
CvPoint2D64f* mn = (CvPoint2D64f*)_mn->data.db;
matL = cvCreateMat( 2*count, 12, CV_64F );
matL.reset(cvCreateMat( 2*count, 12, CV_64F ));
L = matL->data.db;
for( i = 0; i < count; i++, L += 24 )
@@ -1162,11 +1162,11 @@ CV_IMPL void cvInitIntrinsicParams2D( const CvMat* objectPoints,
if( objectPoints->rows != 1 || imagePoints->rows != 1 )
CV_Error( CV_StsBadSize, "object points and image points must be a single-row matrices" );
matA = cvCreateMat( 2*nimages, 2, CV_64F );
_b = cvCreateMat( 2*nimages, 1, CV_64F );
matA.reset(cvCreateMat( 2*nimages, 2, CV_64F ));
_b.reset(cvCreateMat( 2*nimages, 1, CV_64F ));
a[2] = (imageSize.width - 1)*0.5;
a[5] = (imageSize.height - 1)*0.5;
_allH = cvCreateMat( nimages, 9, CV_64F );
_allH.reset(cvCreateMat( nimages, 9, CV_64F ));
// extract vanishing points in order to obtain initial value for the focal length
for( i = 0, pos = 0; i < nimages; i++, pos += ni )
@@ -1310,16 +1310,16 @@ CV_IMPL double cvCalibrateCamera2( const CvMat* objectPoints,
total += ni;
}
matM = cvCreateMat( 1, total, CV_64FC3 );
_m = cvCreateMat( 1, total, CV_64FC2 );
matM.reset(cvCreateMat( 1, total, CV_64FC3 ));
_m.reset(cvCreateMat( 1, total, CV_64FC2 ));
cvConvertPointsHomogeneous( objectPoints, matM );
cvConvertPointsHomogeneous( imagePoints, _m );
nparams = NINTRINSIC + nimages*6;
_Ji = cvCreateMat( maxPoints*2, NINTRINSIC, CV_64FC1 );
_Je = cvCreateMat( maxPoints*2, 6, CV_64FC1 );
_err = cvCreateMat( maxPoints*2, 1, CV_64FC1 );
_Ji.reset(cvCreateMat( maxPoints*2, NINTRINSIC, CV_64FC1 ));
_Je.reset(cvCreateMat( maxPoints*2, 6, CV_64FC1 ));
_err.reset(cvCreateMat( maxPoints*2, 1, CV_64FC1 ));
cvZero( _Ji );
_k = cvMat( distCoeffs->rows, distCoeffs->cols, CV_MAKETYPE(CV_64F,CV_MAT_CN(distCoeffs->type)), k);
@@ -1662,7 +1662,7 @@ double cvStereoCalibrate( const CvMat* _objectPoints, const CvMat* _imagePoints1
CV_MAT_TYPE(_npoints->type) == CV_32SC1 );
nimages = _npoints->cols + _npoints->rows - 1;
npoints = cvCreateMat( _npoints->rows, _npoints->cols, _npoints->type );
npoints.reset(cvCreateMat( _npoints->rows, _npoints->cols, _npoints->type ));
cvCopy( _npoints, npoints );
for( i = 0, pointsTotal = 0; i < nimages; i++ )
@@ -1671,8 +1671,8 @@ double cvStereoCalibrate( const CvMat* _objectPoints, const CvMat* _imagePoints1
pointsTotal += npoints->data.i[i];
}
objectPoints = cvCreateMat( _objectPoints->rows, _objectPoints->cols,
CV_64FC(CV_MAT_CN(_objectPoints->type)));
objectPoints.reset(cvCreateMat( _objectPoints->rows, _objectPoints->cols,
CV_64FC(CV_MAT_CN(_objectPoints->type))));
cvConvert( _objectPoints, objectPoints );
cvReshape( objectPoints, objectPoints, 3, 1 );
@@ -1691,7 +1691,7 @@ double cvStereoCalibrate( const CvMat* _objectPoints, const CvMat* _imagePoints1
K[k] = cvMat(3,3,CV_64F,A[k]);
Dist[k] = cvMat(1,8,CV_64F,dk[k]);
imagePoints[k] = cvCreateMat( points->rows, points->cols, CV_64FC(CV_MAT_CN(points->type)));
imagePoints[k].reset(cvCreateMat( points->rows, points->cols, CV_64FC(CV_MAT_CN(points->type))));
cvConvert( points, imagePoints[k] );
cvReshape( imagePoints[k], imagePoints[k], 2, 1 );
@@ -1729,10 +1729,10 @@ double cvStereoCalibrate( const CvMat* _objectPoints, const CvMat* _imagePoints1
recomputeIntrinsics = (flags & CV_CALIB_FIX_INTRINSIC) == 0;
err = cvCreateMat( maxPoints*2, 1, CV_64F );
Je = cvCreateMat( maxPoints*2, 6, CV_64F );
J_LR = cvCreateMat( maxPoints*2, 6, CV_64F );
Ji = cvCreateMat( maxPoints*2, NINTRINSIC, CV_64F );
err.reset(cvCreateMat( maxPoints*2, 1, CV_64F ));
Je.reset(cvCreateMat( maxPoints*2, 6, CV_64F ));
J_LR.reset(cvCreateMat( maxPoints*2, 6, CV_64F ));
Ji.reset(cvCreateMat( maxPoints*2, NINTRINSIC, CV_64F ));
cvZero( Ji );
// we optimize for the inter-camera R(3),t(3), then, optionally,
@@ -1740,7 +1740,7 @@ double cvStereoCalibrate( const CvMat* _objectPoints, const CvMat* _imagePoints1
nparams = 6*(nimages+1) + (recomputeIntrinsics ? NINTRINSIC*2 : 0);
// storage for initial [om(R){i}|t{i}] (in order to compute the median for each component)
RT0 = cvCreateMat( 6, nimages, CV_64F );
RT0.reset(cvCreateMat( 6, nimages, CV_64F ));
solver.init( nparams, 0, termCrit );
if( recomputeIntrinsics )
@@ -2080,7 +2080,7 @@ icvGetRectangles( const CvMat* cameraMatrix, const CvMat* distCoeffs,
{
const int N = 9;
int x, y, k;
cv::Ptr<CvMat> _pts = cvCreateMat(1, N*N, CV_32FC2);
cv::Ptr<CvMat> _pts(cvCreateMat(1, N*N, CV_32FC2));
CvPoint2D32f* pts = (CvPoint2D32f*)(_pts->data.ptr);
for( y = k = 0; y < N; y++ )
@@ -2439,10 +2439,10 @@ CV_IMPL int cvStereoRectifyUncalibrated(
npoints = _points1->rows * _points1->cols * CV_MAT_CN(_points1->type) / 2;
_m1 = cvCreateMat( _points1->rows, _points1->cols, CV_64FC(CV_MAT_CN(_points1->type)) );
_m2 = cvCreateMat( _points2->rows, _points2->cols, CV_64FC(CV_MAT_CN(_points2->type)) );
_lines1 = cvCreateMat( 1, npoints, CV_64FC3 );
_lines2 = cvCreateMat( 1, npoints, CV_64FC3 );
_m1.reset(cvCreateMat( _points1->rows, _points1->cols, CV_64FC(CV_MAT_CN(_points1->type)) ));
_m2.reset(cvCreateMat( _points2->rows, _points2->cols, CV_64FC(CV_MAT_CN(_points2->type)) ));
_lines1.reset(cvCreateMat( 1, npoints, CV_64FC3 ));
_lines2.reset(cvCreateMat( 1, npoints, CV_64FC3 ));
cvConvert( F0, &F );

View File

@@ -53,7 +53,6 @@ using cv::Ptr;
CvLevMarq::CvLevMarq()
{
mask = prevParam = param = J = err = JtJ = JtJN = JtErr = JtJV = JtJW = Ptr<CvMat>();
lambdaLg10 = 0; state = DONE;
criteria = cvTermCriteria(0,0,0);
iters = 0;
@@ -62,7 +61,6 @@ CvLevMarq::CvLevMarq( int nparams, int nerrs, CvTermCriteria criteria0, bool _completeSymmFlag )
CvLevMarq::CvLevMarq( int nparams, int nerrs, CvTermCriteria criteria0, bool _completeSymmFlag )
{
mask = prevParam = param = J = err = JtJ = JtJN = JtErr = JtJV = JtJW = Ptr<CvMat>();
init(nparams, nerrs, criteria0, _completeSymmFlag);
}
@@ -89,19 +87,19 @@ void CvLevMarq::init( int nparams, int nerrs, CvTermCriteria criteria0, bool _co
{
if( !param || param->rows != nparams || nerrs != (err ? err->rows : 0) )
clear();
mask = cvCreateMat( nparams, 1, CV_8U );
mask.reset(cvCreateMat( nparams, 1, CV_8U ));
cvSet(mask, cvScalarAll(1));
prevParam = cvCreateMat( nparams, 1, CV_64F );
param = cvCreateMat( nparams, 1, CV_64F );
JtJ = cvCreateMat( nparams, nparams, CV_64F );
JtJN = cvCreateMat( nparams, nparams, CV_64F );
JtJV = cvCreateMat( nparams, nparams, CV_64F );
JtJW = cvCreateMat( nparams, 1, CV_64F );
JtErr = cvCreateMat( nparams, 1, CV_64F );
prevParam.reset(cvCreateMat( nparams, 1, CV_64F ));
param.reset(cvCreateMat( nparams, 1, CV_64F ));
JtJ.reset(cvCreateMat( nparams, nparams, CV_64F ));
JtJN.reset(cvCreateMat( nparams, nparams, CV_64F ));
JtJV.reset(cvCreateMat( nparams, nparams, CV_64F ));
JtJW.reset(cvCreateMat( nparams, 1, CV_64F ));
JtErr.reset(cvCreateMat( nparams, 1, CV_64F ));
if( nerrs > 0 )
{
J = cvCreateMat( nerrs, nparams, CV_64F );
err = cvCreateMat( nerrs, 1, CV_64F );
J.reset(cvCreateMat( nerrs, nparams, CV_64F ));
err.reset(cvCreateMat( nerrs, 1, CV_64F ));
}
prevErrNorm = DBL_MAX;
lambdaLg10 = -3;
@@ -196,7 +194,7 @@ bool CvLevMarq::updateAlt( const CvMat*& _param, CvMat*& _JtJ, CvMat*& _JtErr, d
{
double change;
CV_Assert( err.empty() );
CV_Assert( !err );
if( state == DONE )
{
_param = param;

View File

@@ -436,9 +436,9 @@ cv::Mat cv::findEssentialMat( InputArray _points1, InputArray _points2, double f
Mat E;
if( method == RANSAC )
createRANSACPointSetRegistrator(new EMEstimatorCallback, 5, threshold, prob)->run(points1, points2, E, _mask);
createRANSACPointSetRegistrator(makePtr<EMEstimatorCallback>(), 5, threshold, prob)->run(points1, points2, E, _mask);
else
createLMeDSPointSetRegistrator(new EMEstimatorCallback, 5, prob)->run(points1, points2, E, _mask);
createLMeDSPointSetRegistrator(makePtr<EMEstimatorCallback>(), 5, prob)->run(points1, points2, E, _mask);
return E;
}

View File

@@ -307,7 +307,7 @@ cv::Mat cv::findHomography( InputArray _points1, InputArray _points2,
if( ransacReprojThreshold <= 0 )
ransacReprojThreshold = defaultRANSACReprojThreshold;
Ptr<PointSetRegistrator::Callback> cb = new HomographyEstimatorCallback;
Ptr<PointSetRegistrator::Callback> cb = makePtr<HomographyEstimatorCallback>();
if( method == 0 || npoints == 4 )
{
@@ -334,7 +334,7 @@ cv::Mat cv::findHomography( InputArray _points1, InputArray _points2,
if( method == RANSAC || method == LMEDS )
cb->runKernel( src, dst, H );
Mat H8(8, 1, CV_64F, H.ptr<double>());
createLMSolver(new HomographyRefineCallback(src, dst), 10)->run(H8);
createLMSolver(makePtr<HomographyRefineCallback>(src, dst), 10)->run(H8);
}
}
@@ -686,7 +686,7 @@ cv::Mat cv::findFundamentalMat( InputArray _points1, InputArray _points2,
if( npoints < 7 )
return Mat();
Ptr<PointSetRegistrator::Callback> cb = new FMEstimatorCallback;
Ptr<PointSetRegistrator::Callback> cb = makePtr<FMEstimatorCallback>();
int result;
if( npoints == 7 || method == FM_8POINT )

View File

@@ -95,7 +95,7 @@ public:
int ptype = param0.type();
CV_Assert( (param0.cols == 1 || param0.rows == 1) && (ptype == CV_32F || ptype == CV_64F));
CV_Assert( !cb.empty() );
CV_Assert( cb );
int lx = param0.rows + param0.cols - 1;
param0.convertTo(x, CV_64F);
@@ -220,7 +220,7 @@ CV_INIT_ALGORITHM(LMSolverImpl, "LMSolver",
Ptr<LMSolver> createLMSolver(const Ptr<LMSolver::Callback>& cb, int maxIters)
{
CV_Assert( !LMSolverImpl_info_auto.name().empty() );
return new LMSolverImpl(cb, maxIters);
return makePtr<LMSolverImpl>(cb, maxIters);
}
}

View File

@@ -171,7 +171,7 @@ public:
RNG rng((uint64)-1);
CV_Assert( !cb.empty() );
CV_Assert( cb );
CV_Assert( confidence > 0 && confidence < 1 );
CV_Assert( count >= 0 && count2 == count );
@@ -288,7 +288,7 @@ public:
RNG rng((uint64)-1);
CV_Assert( !cb.empty() );
CV_Assert( cb );
CV_Assert( confidence > 0 && confidence < 1 );
CV_Assert( count >= 0 && count2 == count );
@@ -397,7 +397,8 @@ Ptr<PointSetRegistrator> createRANSACPointSetRegistrator(const Ptr<PointSetRegis
double _confidence, int _maxIters)
{
CV_Assert( !RANSACPointSetRegistrator_info_auto.name().empty() );
return new RANSACPointSetRegistrator(_cb, _modelPoints, _threshold, _confidence, _maxIters);
return Ptr<PointSetRegistrator>(
new RANSACPointSetRegistrator(_cb, _modelPoints, _threshold, _confidence, _maxIters));
}
@@ -405,7 +406,8 @@ Ptr<PointSetRegistrator> createLMeDSPointSetRegistrator(const Ptr<PointSetRegist
int _modelPoints, double _confidence, int _maxIters)
{
CV_Assert( !LMeDSPointSetRegistrator_info_auto.name().empty() );
return new LMeDSPointSetRegistrator(_cb, _modelPoints, _confidence, _maxIters);
return Ptr<PointSetRegistrator>(
new LMeDSPointSetRegistrator(_cb, _modelPoints, _confidence, _maxIters));
}
class Affine3DEstimatorCallback : public PointSetRegistrator::Callback
@@ -532,5 +534,5 @@ int cv::estimateAffine3D(InputArray _from, InputArray _to,
param1 = param1 <= 0 ? 3 : param1;
param2 = (param2 < epsilon) ? 0.99 : (param2 > 1 - epsilon) ? 0.99 : param2;
return createRANSACPointSetRegistrator(new Affine3DEstimatorCallback, 4, param1, param2)->run(dFrom, dTo, _out, _inliers);
return createRANSACPointSetRegistrator(makePtr<Affine3DEstimatorCallback>(), 4, param1, param2)->run(dFrom, dTo, _out, _inliers);
}

View File

@@ -991,7 +991,7 @@ const char* StereoBMImpl::name_ = "StereoMatcher.BM";
cv::Ptr<cv::StereoBM> cv::createStereoBM(int _numDisparities, int _SADWindowSize)
{
return new StereoBMImpl(_numDisparities, _SADWindowSize);
return makePtr<StereoBMImpl>(_numDisparities, _SADWindowSize);
}
/* End of file. */

View File

@@ -947,11 +947,12 @@ Ptr<StereoSGBM> createStereoSGBM(int minDisparity, int numDisparities, int SADWi
int speckleWindowSize, int speckleRange,
int mode)
{
return new StereoSGBMImpl(minDisparity, numDisparities, SADWindowSize,
P1, P2, disp12MaxDiff,
preFilterCap, uniquenessRatio,
speckleWindowSize, speckleRange,
mode);
return Ptr<StereoSGBM>(
new StereoSGBMImpl(minDisparity, numDisparities, SADWindowSize,
P1, P2, disp12MaxDiff,
preFilterCap, uniquenessRatio,
speckleWindowSize, speckleRange,
mode));
}
Rect getValidDisparityROI( Rect roi1, Rect roi2,

View File

@@ -240,32 +240,32 @@ cvCorrectMatches(CvMat *F_, CvMat *points1_, CvMat *points2_, CvMat *new_points1
}
// Make sure F uses double precision
F = cvCreateMat(3,3,CV_64FC1);
F.reset(cvCreateMat(3,3,CV_64FC1));
cvConvert(F_, F);
// Make sure points1 uses double precision
points1 = cvCreateMat(points1_->rows,points1_->cols,CV_64FC2);
points1.reset(cvCreateMat(points1_->rows,points1_->cols,CV_64FC2));
cvConvert(points1_, points1);
// Make sure points2 uses double precision
points2 = cvCreateMat(points2_->rows,points2_->cols,CV_64FC2);
points2.reset(cvCreateMat(points2_->rows,points2_->cols,CV_64FC2));
cvConvert(points2_, points2);
tmp33 = cvCreateMat(3,3,CV_64FC1);
tmp31 = cvCreateMat(3,1,CV_64FC1), tmp31_2 = cvCreateMat(3,1,CV_64FC1);
T1i = cvCreateMat(3,3,CV_64FC1), T2i = cvCreateMat(3,3,CV_64FC1);
R1 = cvCreateMat(3,3,CV_64FC1), R2 = cvCreateMat(3,3,CV_64FC1);
TFT = cvCreateMat(3,3,CV_64FC1), TFTt = cvCreateMat(3,3,CV_64FC1), RTFTR = cvCreateMat(3,3,CV_64FC1);
U = cvCreateMat(3,3,CV_64FC1);
S = cvCreateMat(3,3,CV_64FC1);
V = cvCreateMat(3,3,CV_64FC1);
e1 = cvCreateMat(3,1,CV_64FC1), e2 = cvCreateMat(3,1,CV_64FC1);
tmp33.reset(cvCreateMat(3,3,CV_64FC1));
tmp31.reset(cvCreateMat(3,1,CV_64FC1)), tmp31_2.reset(cvCreateMat(3,1,CV_64FC1));
T1i.reset(cvCreateMat(3,3,CV_64FC1)), T2i.reset(cvCreateMat(3,3,CV_64FC1));
R1.reset(cvCreateMat(3,3,CV_64FC1)), R2.reset(cvCreateMat(3,3,CV_64FC1));
TFT.reset(cvCreateMat(3,3,CV_64FC1)), TFTt.reset(cvCreateMat(3,3,CV_64FC1)), RTFTR.reset(cvCreateMat(3,3,CV_64FC1));
U.reset(cvCreateMat(3,3,CV_64FC1));
S.reset(cvCreateMat(3,3,CV_64FC1));
V.reset(cvCreateMat(3,3,CV_64FC1));
e1.reset(cvCreateMat(3,1,CV_64FC1)), e2.reset(cvCreateMat(3,1,CV_64FC1));
double x1, y1, x2, y2;
double scale;
double f1, f2, a, b, c, d;
polynomial = cvCreateMat(1,7,CV_64FC1);
result = cvCreateMat(1,6,CV_64FC2);
polynomial.reset(cvCreateMat(1,7,CV_64FC1));
result.reset(cvCreateMat(1,6,CV_64FC2));
double t_min, s_val, t, s;
for (int p = 0; p < points1->cols; ++p) {
// Replace F by T2-t * F * T1-t