Merge pull request #785 from taka-no-me:split_c_cpp2

commit 913e6833d5
Author: Andrey Kamaev, 2013-04-09 08:53:49 +04:00 (committed by OpenCV Buildbot)
222 changed files with 2387 additions and 1986 deletions

View File

@@ -1,3 +1,4 @@
set(HAVE_FFMPEG 1)
set(NEW_FFMPEG 1)
set(HAVE_FFMPEG_CODEC 1)
set(HAVE_FFMPEG_FORMAT 1)

View File

@@ -1,4 +1,4 @@
SET(OPENCV_HAARTRAINING_DEPS opencv_core opencv_imgproc opencv_highgui opencv_objdetect opencv_calib3d opencv_video opencv_features2d opencv_flann opencv_legacy)
SET(OPENCV_HAARTRAINING_DEPS opencv_core opencv_imgproc opencv_photo opencv_highgui opencv_objdetect opencv_calib3d opencv_video opencv_features2d opencv_flann opencv_legacy)
ocv_check_dependencies(${OPENCV_HAARTRAINING_DEPS})
if(NOT OCV_DEPENDENCIES_FOUND)

View File

@@ -1,4 +1,4 @@
set(OPENCV_TRAINCASCADE_DEPS opencv_core opencv_ml opencv_imgproc opencv_objdetect opencv_highgui opencv_calib3d opencv_video opencv_features2d opencv_flann opencv_legacy)
set(OPENCV_TRAINCASCADE_DEPS opencv_core opencv_ml opencv_imgproc opencv_photo opencv_objdetect opencv_highgui opencv_calib3d opencv_video opencv_features2d opencv_flann opencv_legacy)
ocv_check_dependencies(${OPENCV_TRAINCASCADE_DEPS})
if(NOT OCV_DEPENDENCIES_FOUND)

View File

@@ -1,6 +1,7 @@
#include "opencv2/core.hpp"
#include "opencv2/imgproc.hpp"
#include "opencv2/highgui.hpp"
#include "cv.h"
#include "imagestorage.h"
#include <stdio.h>
#include <iostream>

View File

@@ -29,7 +29,7 @@ if(WITH_GSTREAMER AND NOT WITH_GSTREAMER_1_X)
set(GSTREAMER_RIFF_VERSION ${ALIASOF_gstreamer-riff-0.10_VERSION})
set(GSTREAMER_PBUTILS_VERSION ${ALIASOF_gstreamer-pbutils-0.10_VERSION})
endif()
endif(WITH_GSTREAMER AND NOT WITH_GSTREAMER_1_X)
# if gstreamer 0.10 was not found, or we specified we wanted 1.x, try to find it
@@ -40,7 +40,7 @@ if(WITH_GSTREAMER_1_X OR NOT HAVE_GSTREAMER)
CHECK_MODULE(gstreamer-app-1.0 HAVE_GSTREAMER_APP)
CHECK_MODULE(gstreamer-riff-1.0 HAVE_GSTREAMER_RIFF)
CHECK_MODULE(gstreamer-pbutils-1.0 HAVE_GSTREAMER_PBUTILS)
if(HAVE_GSTREAMER_BASE AND HAVE_GSTREAMER_VIDEO AND HAVE_GSTREAMER_APP AND HAVE_GSTREAMER_RIFF AND HAVE_GSTREAMER_PBUTILS)
set(HAVE_GSTREAMER TRUE)
set(GSTREAMER_BASE_VERSION ${ALIASOF_gstreamer-base-1.0_VERSION})
@@ -49,7 +49,7 @@ if(WITH_GSTREAMER_1_X OR NOT HAVE_GSTREAMER)
set(GSTREAMER_RIFF_VERSION ${ALIASOF_gstreamer-riff-1.0_VERSION})
set(GSTREAMER_PBUTILS_VERSION ${ALIASOF_gstreamer-pbutils-1.0_VERSION})
endif()
endif(WITH_GSTREAMER_1_X OR NOT HAVE_GSTREAMER)
# --- unicap ---

View File

@@ -62,7 +62,8 @@
#include "opencv2/core/core_c.h"
#include "opencv2/imgproc/imgproc_c.h"
#include "opencv2/imgproc.hpp"
#include "opencv2/photo/photo_c.h"
#include "opencv2/video.hpp"
#include "opencv2/features2d.hpp"
#include "opencv2/flann.hpp"

View File

@@ -49,5 +49,8 @@
#include "cv.h"
#include "opencv2/core.hpp"
#include "opencv2/imgproc.hpp"
#include "opencv2/photo.hpp"
#include "opencv2/highgui.hpp"
#endif

View File

@@ -48,7 +48,8 @@
#include "opencv2/core/core_c.h"
#include "opencv2/imgproc/imgproc_c.h"
#include "opencv2/imgproc.hpp"
#include "opencv2/photo/photo_c.h"
#include "opencv2/video.hpp"
#include "opencv2/features2d.hpp"
#include "opencv2/calib3d.hpp"

View File

@@ -44,6 +44,5 @@
#include "opencv2/core/core_c.h"
#include "opencv2/highgui/highgui_c.h"
#include "opencv2/highgui.hpp"
#endif

View File

@@ -66,7 +66,7 @@ void CV_ChessboardDetectorTimingTest::run( int start_from )
CvMat* _v = 0;
CvPoint2D32f* v;
IplImage* img = 0;
IplImage img;
IplImage* gray = 0;
IplImage* thresh = 0;
@@ -105,9 +105,10 @@ void CV_ChessboardDetectorTimingTest::run( int start_from )
/* read the image */
sprintf( filename, "%s%s", filepath, imgname );
img = cvLoadImage( filename );
cv::Mat img2 = cv::imread( filename );
img = img2;
if( !img )
if( img2.empty() )
{
ts->printf( cvtest::TS::LOG, "one of chessboard images can't be read: %s\n", filename );
if( max_idx == 1 )
@@ -120,9 +121,9 @@ void CV_ChessboardDetectorTimingTest::run( int start_from )
ts->printf(cvtest::TS::LOG, "%s: chessboard %d:\n", imgname, is_chessboard);
gray = cvCreateImage( cvSize( img->width, img->height ), IPL_DEPTH_8U, 1 );
gray = cvCreateImage( cvSize( img.width, img.height ), IPL_DEPTH_8U, 1 );
thresh = cvCreateImage( cvSize( img->width, img->height ), IPL_DEPTH_8U, 1 );
thresh = cvCreateImage( cvSize( img.width, img.height ), IPL_DEPTH_8U, 1 );
cvCvtColor( img, gray, CV_BGR2GRAY );
cvCvtColor( &img, gray, CV_BGR2GRAY );
count0 = pattern_size.width*pattern_size.height;
@@ -164,7 +165,6 @@ void CV_ChessboardDetectorTimingTest::run( int start_from )
find_chessboard_time*1e-6, find_chessboard_time/num_pixels);
cvReleaseMat( &_v );
cvReleaseImage( &img );
cvReleaseImage( &gray );
cvReleaseImage( &thresh );
progress = update_progress( progress, idx-1, max_idx, 0 );
@@ -175,7 +175,6 @@ _exit_:
/* release occupied memory */
cvReleaseMat( &_v );
cvReleaseFileStorage( &fs );
cvReleaseImage( &img );
cvReleaseImage( &gray );
cvReleaseImage( &thresh );
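For reference, the pattern introduced above (loading with cv::imread and aliasing the result through a stack-allocated IplImage header for the remaining C calls) can be sketched roughly as below. This is an illustration only, not part of the patch, and the file name is a placeholder; the IplImage header shares the Mat's pixel data, which is why the cvReleaseImage( &img ) calls could be dropped.

    #include "opencv2/core/core_c.h"
    #include "opencv2/imgproc/imgproc_c.h"
    #include "opencv2/highgui.hpp"

    int main()
    {
        cv::Mat img2 = cv::imread("chessboard.png");   // placeholder file name
        if (img2.empty())
            return 1;

        IplImage img = img2;   // header only: no copy, no ownership

        // The legacy C API still works on the borrowed header; note img.width
        // and &img instead of img->width and img.
        IplImage* gray = cvCreateImage(cvSize(img.width, img.height), IPL_DEPTH_8U, 1);
        cvCvtColor(&img, gray, CV_BGR2GRAY);

        cvReleaseImage(&gray);   // only the explicitly created image is released
        return 0;
    }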

View File

@@ -912,8 +912,7 @@ void ChamferMatcher::Template::show() const
#ifdef HAVE_OPENCV_HIGHGUI
namedWindow("templ",1);
imshow("templ",templ_color);
waitKey();
cvWaitKey(0);
#else
CV_Error(CV_StsNotImplemented, "OpenCV has been compiled without GUI support");
#endif

View File

@@ -30,7 +30,7 @@ circle
----------
Draws a circle.
.. ocv:function:: void circle(Mat& img, Point center, int radius, const Scalar& color, int thickness=1, int lineType=8, int shift=0)
.. ocv:function:: void circle( Mat& img, Point center, int radius, const Scalar& color, int thickness=1, int lineType=LINE_8, int shift=0 )
.. ocv:pyfunction:: cv2.circle(img, center, radius, color[, thickness[, lineType[, shift]]]) -> img
@@ -83,9 +83,9 @@ ellipse
-----------
Draws a simple or thick elliptic arc or fills an ellipse sector.
.. ocv:function:: void ellipse(Mat& img, Point center, Size axes, double angle, double startAngle, double endAngle, const Scalar& color, int thickness=1, int lineType=8, int shift=0)
.. ocv:function:: void ellipse( Mat& img, Point center, Size axes, double angle, double startAngle, double endAngle, const Scalar& color, int thickness=1, int lineType=LINE_8, int shift=0 )
.. ocv:function:: void ellipse(Mat& img, const RotatedRect& box, const Scalar& color, int thickness=1, int lineType=8)
.. ocv:function:: void ellipse( Mat& img, const RotatedRect& box, const Scalar& color, int thickness=1, int lineType=LINE_8 )
.. ocv:pyfunction:: cv2.ellipse(img, center, axes, angle, startAngle, endAngle, color[, thickness[, lineType[, shift]]]) -> img
@@ -162,7 +162,9 @@ fillConvexPoly
------------------
Fills a convex polygon.
.. ocv:function:: void fillConvexPoly(Mat& img, const Point* pts, int npts, const Scalar& color, int lineType=8, int shift=0)
.. ocv:function:: void fillConvexPoly( Mat& img, const Point* pts, int npts, const Scalar& color, int lineType=LINE_8, int shift=0 )
.. ocv:function:: void fillConvexPoly( InputOutputArray img, InputArray points, const Scalar& color, int lineType=LINE_8, int shift=0 )
.. ocv:pyfunction:: cv2.fillConvexPoly(img, points, color[, lineType[, shift]]) -> img
@@ -192,7 +194,9 @@ fillPoly
------------
Fills the area bounded by one or more polygons.
.. ocv:function:: void fillPoly(Mat& img, const Point** pts, const int* npts, int ncontours, const Scalar& color, int lineType=8, int shift=0, Point offset=Point() )
.. ocv:function:: void fillPoly( Mat& img, const Point** pts, const int* npts, int ncontours, const Scalar& color, int lineType=LINE_8, int shift=0, Point offset=Point() )
.. ocv:function:: void fillPoly( InputOutputArray img, InputArrayOfArrays pts, const Scalar& color, int lineType=LINE_8, int shift=0, Point offset=Point() )
.. ocv:pyfunction:: cv2.fillPoly(img, pts, color[, lineType[, shift[, offset]]]) -> img
@@ -330,7 +334,7 @@ line
--------
Draws a line segment connecting two points.
.. ocv:function:: void line(Mat& img, Point pt1, Point pt2, const Scalar& color, int thickness=1, int lineType=8, int shift=0)
.. ocv:function:: void line( Mat& img, Point pt1, Point pt2, const Scalar& color, int thickness=1, int lineType=LINE_8, int shift=0 )
.. ocv:pyfunction:: cv2.line(img, pt1, pt2, color[, thickness[, lineType[, shift]]]) -> img
@@ -418,9 +422,9 @@ rectangle
-------------
Draws a simple, thick, or filled up-right rectangle.
.. ocv:function:: void rectangle(Mat& img, Point pt1, Point pt2, const Scalar& color, int thickness=1, int lineType=8, int shift=0)
.. ocv:function:: void rectangle( Mat& img, Point pt1, Point pt2, const Scalar& color, int thickness=1, int lineType=LINE_8, int shift=0 )
.. ocv:function:: void rectangle( Mat& img, Rect rec, const Scalar& color, int thickness=1, int lineType=8, int shift=0 )
.. ocv:function:: void rectangle( Mat& img, Rect rec, const Scalar& color, int thickness=1, int lineType=LINE_8, int shift=0 )
.. ocv:pyfunction:: cv2.rectangle(img, pt1, pt2, color[, thickness[, lineType[, shift]]]) -> img
@@ -452,9 +456,9 @@ polylines
-------------
Draws several polygonal curves.
.. ocv:function:: void polylines( Mat& img, const Point* const* pts, const int* npts, int ncontours, bool isClosed, const Scalar& color, int thickness=1, int lineType=8, int shift=0 )
.. ocv:function:: void polylines( Mat& img, const Point* const* pts, const int* npts, int ncontours, bool isClosed, const Scalar& color, int thickness=1, int lineType=LINE_8, int shift=0 )
.. ocv:function:: void polylines( InputOutputArray img, InputArrayOfArrays pts, bool isClosed, const Scalar& color, int thickness=1, int lineType=8, int shift=0 )
.. ocv:function:: void polylines( InputOutputArray img, InputArrayOfArrays pts, bool isClosed, const Scalar& color, int thickness=1, int lineType=LINE_8, int shift=0 )
.. ocv:pyfunction:: cv2.polylines(img, pts, isClosed, color[, thickness[, lineType[, shift]]]) -> img
@@ -487,7 +491,7 @@ drawContours
----------------
Draws contours outlines or filled contours.
.. ocv:function:: void drawContours( InputOutputArray image, InputArrayOfArrays contours, int contourIdx, const Scalar& color, int thickness=1, int lineType=8, InputArray hierarchy=noArray(), int maxLevel=INT_MAX, Point offset=Point() )
.. ocv:function:: void drawContours( InputOutputArray image, InputArrayOfArrays contours, int contourIdx, const Scalar& color, int thickness=1, int lineType=LINE_8, InputArray hierarchy=noArray(), int maxLevel=INT_MAX, Point offset=Point() )
.. ocv:pyfunction:: cv2.drawContours(image, contours, contourIdx, color[, thickness[, lineType[, hierarchy[, maxLevel[, offset]]]]]) -> image
@@ -570,11 +574,12 @@ putText
-----------
Draws a text string.
.. ocv:function:: void putText( Mat& img, const String& text, Point org, int fontFace, double fontScale, Scalar color, int thickness=1, int lineType=8, bool bottomLeftOrigin=false )
.. ocv:function:: void putText( Mat& img, const String& text, Point org, int fontFace, double fontScale, Scalar color, int thickness=1, int lineType=LINE_8, bool bottomLeftOrigin=false )
.. ocv:pyfunction:: cv2.putText(img, text, org, fontFace, fontScale, color[, thickness[, lineType[, bottomLeftOrigin]]]) -> None
.. ocv:cfunction:: void cvPutText( CvArr* img, const char* text, CvPoint org, const CvFont* font, CvScalar color )
.. ocv:pyoldfunction:: cv.PutText(img, text, org, font, color)-> None
:param img: Image.
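For illustration only (not part of the patch): the renamed lineType constants used in the updated signatures above would be used roughly like this, assuming the 3.0-dev tree where the drawing functions are still declared in opencv2/core.hpp.

    #include "opencv2/core.hpp"

    int main()
    {
        cv::Mat canvas = cv::Mat::zeros(240, 320, CV_8UC3);

        // LINE_8 replaces the bare integer 8; LINE_AA and FILLED are named as well.
        cv::line(canvas, cv::Point(10, 10), cv::Point(310, 230),
                 cv::Scalar(0, 255, 0), 2, cv::LINE_8);
        cv::circle(canvas, cv::Point(160, 120), 50,
                   cv::Scalar(0, 0, 255), cv::FILLED, cv::LINE_AA);
        return 0;
    }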

View File

@@ -496,67 +496,73 @@ CV_EXPORTS void randShuffle(InputOutputArray dst, double iterFactor = 1., RNG* r
CV_EXPORTS_AS(randShuffle) void randShuffle_(InputOutputArray dst, double iterFactor = 1.);
enum { FILLED = -1,
LINE_4 = 4,
LINE_8 = 8,
LINE_AA = 16
};
//! draws the line segment (pt1, pt2) in the image
CV_EXPORTS_W void line(CV_IN_OUT Mat& img, Point pt1, Point pt2, const Scalar& color,
int thickness = 1, int lineType = 8, int shift = 0);
int thickness = 1, int lineType = LINE_8, int shift = 0);
//! draws the rectangle outline or a solid rectangle with the opposite corners pt1 and pt2 in the image
CV_EXPORTS_W void rectangle(CV_IN_OUT Mat& img, Point pt1, Point pt2,
const Scalar& color, int thickness = 1,
int lineType = 8, int shift = 0);
int lineType = LINE_8, int shift = 0);
//! draws the rectangle outline or a solid rectangle covering rec in the image
CV_EXPORTS void rectangle(CV_IN_OUT Mat& img, Rect rec,
const Scalar& color, int thickness = 1,
int lineType = 8, int shift = 0);
int lineType = LINE_8, int shift = 0);
//! draws the circle outline or a solid circle in the image
CV_EXPORTS_W void circle(CV_IN_OUT Mat& img, Point center, int radius,
const Scalar& color, int thickness = 1,
int lineType = 8, int shift = 0);
int lineType = LINE_8, int shift = 0);
//! draws an elliptic arc, ellipse sector or a rotated ellipse in the image
CV_EXPORTS_W void ellipse(CV_IN_OUT Mat& img, Point center, Size axes,
double angle, double startAngle, double endAngle,
const Scalar& color, int thickness = 1,
int lineType = 8, int shift = 0);
int lineType = LINE_8, int shift = 0);
//! draws a rotated ellipse in the image
CV_EXPORTS_W void ellipse(CV_IN_OUT Mat& img, const RotatedRect& box, const Scalar& color,
int thickness = 1, int lineType = 8);
int thickness = 1, int lineType = LINE_8);
//! draws a filled convex polygon in the image
CV_EXPORTS void fillConvexPoly(Mat& img, const Point* pts, int npts,
const Scalar& color, int lineType = 8,
const Scalar& color, int lineType = LINE_8,
int shift = 0);
CV_EXPORTS_W void fillConvexPoly(InputOutputArray img, InputArray points,
const Scalar& color, int lineType = 8,
const Scalar& color, int lineType = LINE_8,
int shift = 0);
//! fills an area bounded by one or more polygons
CV_EXPORTS void fillPoly(Mat& img, const Point** pts,
const int* npts, int ncontours,
const Scalar& color, int lineType = 8, int shift = 0,
const Scalar& color, int lineType = LINE_8, int shift = 0,
Point offset = Point() );
CV_EXPORTS_W void fillPoly(InputOutputArray img, InputArrayOfArrays pts,
const Scalar& color, int lineType = 8, int shift = 0,
const Scalar& color, int lineType = LINE_8, int shift = 0,
Point offset = Point() );
//! draws one or more polygonal curves
CV_EXPORTS void polylines(Mat& img, const Point* const* pts, const int* npts,
int ncontours, bool isClosed, const Scalar& color,
int thickness = 1, int lineType = 8, int shift = 0 );
int thickness = 1, int lineType = LINE_8, int shift = 0 );
CV_EXPORTS_W void polylines(InputOutputArray img, InputArrayOfArrays pts,
bool isClosed, const Scalar& color,
int thickness = 1, int lineType = 8, int shift = 0 );
int thickness = 1, int lineType = LINE_8, int shift = 0 );
//! draws contours in the image
CV_EXPORTS_W void drawContours( InputOutputArray image, InputArrayOfArrays contours,
int contourIdx, const Scalar& color,
int thickness = 1, int lineType = 8,
int thickness = 1, int lineType = LINE_8,
InputArray hierarchy = noArray(),
int maxLevel = INT_MAX, Point offset = Point() );
@@ -587,7 +593,7 @@ enum
//! renders text string in the image
CV_EXPORTS_W void putText( Mat& img, const String& text, Point org,
int fontFace, double fontScale, Scalar color,
int thickness = 1, int lineType = 8,
int thickness = 1, int lineType = LINE_8,
bool bottomLeftOrigin = false );
//! returns bounding box of the text string
@@ -631,9 +637,9 @@ CV_EXPORTS ConvertScaleData getConvertScaleElem(int fromType, int toType);
PCA pca(pcaset, // pass the data
Mat(), // we do not have a pre-computed mean vector,
// so let the PCA engine to compute it
CV_PCA_DATA_AS_ROW, // indicate that the vectors
PCA::DATA_AS_ROW, // indicate that the vectors
// are stored as matrix rows
// (use CV_PCA_DATA_AS_COL if the vectors are
// (use PCA::DATA_AS_COL if the vectors are
// the matrix columns)
maxComponents // specify, how many principal components to retain
);
@@ -663,6 +669,11 @@ CV_EXPORTS ConvertScaleData getConvertScaleElem(int fromType, int toType);
class CV_EXPORTS PCA
{
public:
enum { DATA_AS_ROW = 0,
DATA_AS_COL = 1,
USE_AVG = 2
};
//! default constructor
PCA();
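A minimal sketch (not part of the patch) of the relocated PCA flags shown above, with CV_PCA_DATA_AS_ROW becoming PCA::DATA_AS_ROW:

    #include "opencv2/core.hpp"

    int main()
    {
        cv::Mat pcaset(100, 8, CV_32F);                        // 100 samples, one per row
        cv::randu(pcaset, cv::Scalar::all(0), cv::Scalar::all(1));

        int maxComponents = 3;
        cv::PCA pca(pcaset,                   // pass the data
                    cv::Mat(),                // let PCA compute the mean vector
                    cv::PCA::DATA_AS_ROW,     // was CV_PCA_DATA_AS_ROW
                    maxComponents);

        cv::Mat compressed = pca.project(pcaset);      // 100 x 3
        cv::Mat restored   = pca.backProject(compressed);
        return 0;
    }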

View File

@@ -43,10 +43,9 @@
#ifndef __OPENCV_FEATURES_2D_HPP__
#define __OPENCV_FEATURES_2D_HPP__
#include "opencv2/flann/miniflann.hpp"
#ifdef __cplusplus
#include "opencv2/core.hpp"
#include "opencv2/flann/miniflann.hpp"
#include <limits>
namespace cv

View File

@@ -43,9 +43,6 @@
#ifndef _OPENCV_FLANN_HPP_
#define _OPENCV_FLANN_HPP_
#ifdef __cplusplus
#include "opencv2/core/types_c.h"
#include "opencv2/core.hpp"
#include "opencv2/flann/miniflann.hpp"
#include "opencv2/flann/flann_base.hpp"
@@ -422,6 +419,4 @@ FLANN_DEPRECATED int hierarchicalClustering(const Mat& features, Mat& centers, c
} } // namespace cv::flann
#endif // __cplusplus
#endif

View File

@@ -43,8 +43,6 @@
#ifndef _OPENCV_MINIFLANN_HPP_
#define _OPENCV_MINIFLANN_HPP_
#ifdef __cplusplus
#include "opencv2/core.hpp"
#include "opencv2/flann/defines.h"
@@ -157,6 +155,4 @@ protected:
} } // namespace cv::flann
#endif // __cplusplus
#endif

View File

@@ -74,13 +74,13 @@ void CV_LshTableBadArgTest::run( int /* start_from */ )
int errors = 0;
caller.key_size = 0;
errors += run_test_case(CV_StsBadArg, "key_size is zero", caller);
errors += run_test_case(Error::StsBadArg, "key_size is zero", caller);
caller.key_size = static_cast<int>(sizeof(size_t) * CHAR_BIT);
errors += run_test_case(CV_StsBadArg, "key_size is too big", caller);
errors += run_test_case(Error::StsBadArg, "key_size is too big", caller);
caller.key_size += cvtest::randInt(rng) % 100;
errors += run_test_case(CV_StsBadArg, "key_size is too big", caller);
errors += run_test_case(Error::StsBadArg, "key_size is too big", caller);
if (errors != 0)
ts->set_failed_test_info(cvtest::TS::FAIL_MISMATCH);
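For context (illustration only, not part of the patch): with the C++-only headers the status codes used above live in the cv::Error enum, so raising and catching one looks roughly like this:

    #include "opencv2/core.hpp"
    #include <iostream>

    int main()
    {
        try
        {
            CV_Error(cv::Error::StsBadArg, "key_size is zero");   // was CV_StsBadArg
        }
        catch (const cv::Exception& e)
        {
            std::cout << "caught: " << e.what() << std::endl;
        }
        return 0;
    }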

View File

@@ -1815,12 +1815,17 @@ PERF_TEST_P(Sz_Dp_MinDist, ImgProc_HoughCircles,
//////////////////////////////////////////////////////////////////////
// GeneralizedHough
CV_FLAGS(GHMethod, cv::GHT_POSITION, cv::GHT_SCALE, cv::GHT_ROTATION);
enum { GHT_POSITION = cv::GeneralizedHough::GHT_POSITION,
GHT_SCALE = cv::GeneralizedHough::GHT_SCALE,
GHT_ROTATION = cv::GeneralizedHough::GHT_ROTATION
};
CV_FLAGS(GHMethod, GHT_POSITION, GHT_SCALE, GHT_ROTATION);
DEF_PARAM_TEST(Method_Sz, GHMethod, cv::Size);
PERF_TEST_P(Method_Sz, ImgProc_GeneralizedHough,
Combine(Values(GHMethod(cv::GHT_POSITION), GHMethod(cv::GHT_POSITION | cv::GHT_SCALE), GHMethod(cv::GHT_POSITION | cv::GHT_ROTATION), GHMethod(cv::GHT_POSITION | cv::GHT_SCALE | cv::GHT_ROTATION)),
Combine(Values(GHMethod(GHT_POSITION), GHMethod(GHT_POSITION | GHT_SCALE), GHMethod(GHT_POSITION | GHT_ROTATION), GHMethod(GHT_POSITION | GHT_SCALE | GHT_ROTATION)),
GPU_TYPICAL_MAT_SIZES))
{
declare.time(10);
@@ -1870,7 +1875,7 @@ PERF_TEST_P(Method_Sz, ImgProc_GeneralizedHough,
cv::gpu::GpuMat posAndVotes;
cv::Ptr<cv::gpu::GeneralizedHough_GPU> d_hough = cv::gpu::GeneralizedHough_GPU::create(method);
if (method & cv::GHT_ROTATION)
if (method & GHT_ROTATION)
{
d_hough->set("maxAngle", 90.0);
d_hough->set("angleStep", 2.0);
@@ -1888,7 +1893,7 @@ PERF_TEST_P(Method_Sz, ImgProc_GeneralizedHough,
cv::Mat positions;
cv::Ptr<cv::GeneralizedHough> hough = cv::GeneralizedHough::create(method);
if (method & cv::GHT_ROTATION)
if (method & GHT_ROTATION)
{
hough->set("maxAngle", 90.0);
hough->set("angleStep", 2.0);

View File

@@ -221,6 +221,7 @@ endif()
if(WIN32)
link_directories("${OpenCV_SOURCE_DIR}/3rdparty/lib") # for ffmpeg wrapper only
include_directories(AFTER SYSTEM "${OpenCV_SOURCE_DIR}/3rdparty/include") # for directshow in VS2005 and multi-monitor support on MinGW
include_directories(AFTER SYSTEM "${OpenCV_SOURCE_DIR}/3rdparty/include/ffmpeg_") # for tests
endif()
if(UNIX)

View File

@@ -123,7 +123,7 @@ fontQt
----------
Creates the font to draw a text on an image.
.. ocv:function:: CvFont fontQt(const String& nameFont, int pointSize = -1, Scalar color = Scalar::all(0), int weight = CV_FONT_NORMAL, int style = CV_STYLE_NORMAL, int spacing = 0)
.. ocv:function:: QtFont fontQt( const String& nameFont, int pointSize=-1, Scalar color=Scalar::all(0), int weight=QT_FONT_NORMAL, int style=QT_STYLE_NORMAL, int spacing=0 )
.. ocv:cfunction:: CvFont cvFontQt(const char* nameFont, int pointSize=-1, CvScalar color=cvScalarAll(0), int weight=CV_FONT_NORMAL, int style=CV_STYLE_NORMAL, int spacing=0)
@@ -169,7 +169,7 @@ addText
-----------
Creates the font to draw a text on an image.
.. ocv:function:: void addText( const Mat& img, const String& text, Point org, CvFont font )
.. ocv:function:: void addText( const Mat& img, const String& text, Point org, const QtFont& font )
.. ocv:cfunction:: void cvAddText( const CvArr* img, const char* text, CvPoint org, CvFont * arg2 )
@@ -302,7 +302,7 @@ createButton
----------------
Attaches a button to the control panel.
.. ocv:function:: int createButton( const String& bar_name, ButtonCallback on_change, void* userdata=NULL, int type=CV_PUSH_BUTTON, bool initial_button_state=0 )
.. ocv:function:: int createButton( const String& bar_name, ButtonCallback on_change, void* userdata=0, int type=QT_PUSH_BUTTON, bool initial_button_state=false )
.. ocv:cfunction:: int cvCreateButton( const char* button_name=NULL, CvButtonCallback on_change=NULL, void* userdata=NULL, int button_type=CV_PUSH_BUTTON, int initial_button_state=0 )
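Illustration only (requires a Qt build of highgui, otherwise these calls throw StsNotImplemented): the Qt helpers now take and return QtFont and use the QT_* constants, per the updated signatures above. Rough usage:

    #include "opencv2/core.hpp"
    #include "opencv2/highgui.hpp"

    int main()
    {
        cv::Mat img(200, 400, CV_8UC3, cv::Scalar::all(255));

        // QtFont replaces CvFont; CV_FONT_NORMAL / CV_STYLE_NORMAL become QT_*.
        cv::QtFont font = cv::fontQt("Times", 20, cv::Scalar(0, 0, 0),
                                     cv::QT_FONT_NORMAL, cv::QT_STYLE_NORMAL);
        cv::addText(img, "Hello", cv::Point(20, 100), font);

        cv::imshow("qt text", img);
        cv::waitKey(0);
        return 0;
    }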

View File

@@ -59,7 +59,7 @@ imread
------
Loads an image from a file.
.. ocv:function:: Mat imread( const String& filename, int flags=1 )
.. ocv:function:: Mat imread( const String& filename, int flags=IMREAD_COLOR )
.. ocv:pyfunction:: cv2.imread(filename[, flags]) -> retval
@@ -321,9 +321,9 @@ VideoCapture::retrieve
----------------------
Decodes and returns the grabbed video frame.
.. ocv:function:: bool VideoCapture::retrieve(Mat& image, int channel=0)
.. ocv:function:: bool VideoCapture::retrieve( Mat& image, int flag=0 )
.. ocv:pyfunction:: cv2.VideoCapture.retrieve([image[, channel]]) -> retval, image
.. ocv:pyfunction:: cv2.VideoCapture.retrieve([image[, flag]]) -> retval, image
.. ocv:cfunction:: IplImage* cvRetrieveFrame( CvCapture* capture, int streamIdx=0 )

View File

@@ -43,32 +43,81 @@
#ifndef __OPENCV_HIGHGUI_HPP__
#define __OPENCV_HIGHGUI_HPP__
#include "opencv2/highgui/highgui_c.h"
#ifdef __cplusplus
#include "opencv2/core.hpp"
struct CvCapture;
struct CvVideoWriter;
///////////////////////// graphical user interface //////////////////////////
namespace cv
{
enum {
// Flags for namedWindow
WINDOW_NORMAL = CV_WINDOW_NORMAL, // the user can resize the window (no constraint) / also use to switch a fullscreen window to a normal size
WINDOW_AUTOSIZE = CV_WINDOW_AUTOSIZE, // the user cannot resize the window, the size is constrainted by the image displayed
WINDOW_OPENGL = CV_WINDOW_OPENGL, // window with opengl support
// Flags for namedWindow
enum { WINDOW_NORMAL = 0x00000000, // the user can resize the window (no constraint) / also use to switch a fullscreen window to a normal size
WINDOW_AUTOSIZE = 0x00000001, // the user cannot resize the window, the size is constrainted by the image displayed
WINDOW_OPENGL = 0x00001000, // window with opengl support
WINDOW_FULLSCREEN = 1, // change the window to fullscreen
WINDOW_FREERATIO = 0x00000100, // the image expends as much as it can (no ratio constraint)
WINDOW_KEEPRATIO = 0x00000000 // the ratio of the image is respected
};
// Flags for set / getWindowProperty
enum { WND_PROP_FULLSCREEN = 0, // fullscreen property (can be WINDOW_NORMAL or WINDOW_FULLSCREEN)
WND_PROP_AUTOSIZE = 1, // autosize property (can be WINDOW_NORMAL or WINDOW_AUTOSIZE)
WND_PROP_ASPECT_RATIO = 2, // window's aspect ration (can be set to WINDOW_FREERATIO or WINDOW_KEEPRATIO);
WND_PROP_OPENGL = 3 // opengl support
};
enum { EVENT_MOUSEMOVE = 0,
EVENT_LBUTTONDOWN = 1,
EVENT_RBUTTONDOWN = 2,
EVENT_MBUTTONDOWN = 3,
EVENT_LBUTTONUP = 4,
EVENT_RBUTTONUP = 5,
EVENT_MBUTTONUP = 6,
EVENT_LBUTTONDBLCLK = 7,
EVENT_RBUTTONDBLCLK = 8,
EVENT_MBUTTONDBLCLK = 9
};
enum { EVENT_FLAG_LBUTTON = 1,
EVENT_FLAG_RBUTTON = 2,
EVENT_FLAG_MBUTTON = 4,
EVENT_FLAG_CTRLKEY = 8,
EVENT_FLAG_SHIFTKEY = 16,
EVENT_FLAG_ALTKEY = 32
};
// Qt font
enum { QT_FONT_LIGHT = 25, //QFont::Light,
QT_FONT_NORMAL = 50, //QFont::Normal,
QT_FONT_DEMIBOLD = 63, //QFont::DemiBold,
QT_FONT_BOLD = 75, //QFont::Bold,
QT_FONT_BLACK = 87 //QFont::Black
};
// Qt font style
enum { QT_STYLE_NORMAL = 0, //QFont::StyleNormal,
QT_STYLE_ITALIC = 1, //QFont::StyleItalic,
QT_STYLE_OBLIQUE = 2 //QFont::StyleOblique
};
// Qt "button" type
enum { QT_PUSH_BUTTON = 0,
QT_CHECKBOX = 1,
QT_RADIOBOX = 2
};
typedef void (*MouseCallback)(int event, int x, int y, int flags, void* userdata);
typedef void (*TrackbarCallback)(int pos, void* userdata);
typedef void (*OpenGlDrawCallback)(void* userdata);
typedef void (*ButtonCallback)(int state, void* userdata);
// Flags for set / getWindowProperty
WND_PROP_FULLSCREEN = CV_WND_PROP_FULLSCREEN, // fullscreen property
WND_PROP_AUTOSIZE = CV_WND_PROP_AUTOSIZE, // autosize property
WND_PROP_ASPECT_RATIO = CV_WND_PROP_ASPECTRATIO, // window's aspect ration
WND_PROP_OPENGL = CV_WND_PROP_OPENGL // opengl support
};
CV_EXPORTS_W void namedWindow(const String& winname, int flags = WINDOW_AUTOSIZE);
CV_EXPORTS_W void destroyWindow(const String& winname);
CV_EXPORTS_W void destroyAllWindows();
CV_EXPORTS_W int startWindowThread();
@@ -78,123 +127,373 @@ CV_EXPORTS_W int waitKey(int delay = 0);
CV_EXPORTS_W void imshow(const String& winname, InputArray mat);
CV_EXPORTS_W void resizeWindow(const String& winname, int width, int height);
CV_EXPORTS_W void moveWindow(const String& winname, int x, int y);
CV_EXPORTS_W void setWindowProperty(const String& winname, int prop_id, double prop_value);//YV
CV_EXPORTS_W void setWindowProperty(const String& winname, int prop_id, double prop_value);
CV_EXPORTS_W double getWindowProperty(const String& winname, int prop_id);//YV
CV_EXPORTS_W double getWindowProperty(const String& winname, int prop_id);
enum
{
EVENT_MOUSEMOVE =0,
EVENT_LBUTTONDOWN =1,
EVENT_RBUTTONDOWN =2,
EVENT_MBUTTONDOWN =3,
EVENT_LBUTTONUP =4,
EVENT_RBUTTONUP =5,
EVENT_MBUTTONUP =6,
EVENT_LBUTTONDBLCLK =7,
EVENT_RBUTTONDBLCLK =8,
EVENT_MBUTTONDBLCLK =9
};
enum
{
EVENT_FLAG_LBUTTON =1,
EVENT_FLAG_RBUTTON =2,
EVENT_FLAG_MBUTTON =4,
EVENT_FLAG_CTRLKEY =8,
EVENT_FLAG_SHIFTKEY =16,
EVENT_FLAG_ALTKEY =32
};
typedef void (*MouseCallback)(int event, int x, int y, int flags, void* userdata);
//! assigns callback for mouse events
CV_EXPORTS void setMouseCallback(const String& winname, MouseCallback onMouse, void* userdata = 0);
typedef void (CV_CDECL *TrackbarCallback)(int pos, void* userdata);
CV_EXPORTS int createTrackbar(const String& trackbarname, const String& winname,
int* value, int count,
TrackbarCallback onChange = 0,
void* userdata = 0);
CV_EXPORTS_W int getTrackbarPos(const String& trackbarname, const String& winname);
CV_EXPORTS_W void setTrackbarPos(const String& trackbarname, const String& winname, int pos);
// OpenGL support
typedef void (*OpenGlDrawCallback)(void* userdata);
// OpenGL support
CV_EXPORTS void setOpenGlDrawCallback(const String& winname, OpenGlDrawCallback onOpenGlDraw, void* userdata = 0);
CV_EXPORTS void setOpenGlContext(const String& winname);
CV_EXPORTS void updateWindow(const String& winname);
//Only for Qt
// Only for Qt
CV_EXPORTS CvFont fontQt(const String& nameFont, int pointSize=-1,
Scalar color=Scalar::all(0), int weight=CV_FONT_NORMAL,
int style=CV_STYLE_NORMAL, int spacing=0);
CV_EXPORTS void addText( const Mat& img, const String& text, Point org, CvFont font);
CV_EXPORTS void displayOverlay(const String& winname, const String& text, int delayms CV_DEFAULT(0));
CV_EXPORTS void displayStatusBar(const String& winname, const String& text, int delayms CV_DEFAULT(0));
struct QtFont
{
const char* nameFont; // Qt: nameFont
Scalar color; // Qt: ColorFont -> cvScalar(blue_component, green_component, red\_component[, alpha_component])
int font_face; // Qt: bool italic
const int* ascii; // font data and metrics
const int* greek;
const int* cyrillic;
float hscale, vscale;
float shear; // slope coefficient: 0 - normal, >0 - italic
int thickness; // Qt: weight
float dx; // horizontal interval between letters
int line_type; // Qt: PointSize
};
CV_EXPORTS QtFont fontQt(const String& nameFont, int pointSize = -1,
Scalar color = Scalar::all(0), int weight = QT_FONT_NORMAL,
int style = QT_STYLE_NORMAL, int spacing = 0);
CV_EXPORTS void addText( const Mat& img, const String& text, Point org, const QtFont& font);
CV_EXPORTS void displayOverlay(const String& winname, const String& text, int delayms = 0);
CV_EXPORTS void displayStatusBar(const String& winname, const String& text, int delayms = 0);
CV_EXPORTS void saveWindowParameters(const String& windowName);
CV_EXPORTS void loadWindowParameters(const String& windowName);
CV_EXPORTS int startLoop(int (*pt2Func)(int argc, char *argv[]), int argc, char* argv[]);
CV_EXPORTS void stopLoop();
typedef void (CV_CDECL *ButtonCallback)(int state, void* userdata);
CV_EXPORTS int createButton( const String& bar_name, ButtonCallback on_change,
void* userdata=NULL, int type=CV_PUSH_BUTTON,
bool initial_button_state=0);
void* userdata = 0, int type = QT_PUSH_BUTTON,
bool initial_button_state = false);
//-------------------------
} // cv
enum
//////////////////////////////// image codec ////////////////////////////////
namespace cv
{
// 8bit, color or not
IMREAD_UNCHANGED =-1,
// 8bit, gray
IMREAD_GRAYSCALE =0,
// ?, color
IMREAD_COLOR =1,
// any depth, ?
IMREAD_ANYDEPTH =2,
// ?, any color
IMREAD_ANYCOLOR =4
};
enum
{
IMWRITE_JPEG_QUALITY =1,
IMWRITE_PNG_COMPRESSION =16,
IMWRITE_PNG_STRATEGY =17,
IMWRITE_PNG_BILEVEL =18,
IMWRITE_PNG_STRATEGY_DEFAULT =0,
IMWRITE_PNG_STRATEGY_FILTERED =1,
IMWRITE_PNG_STRATEGY_HUFFMAN_ONLY =2,
IMWRITE_PNG_STRATEGY_RLE =3,
IMWRITE_PNG_STRATEGY_FIXED =4,
IMWRITE_PXM_BINARY =32
};
enum { IMREAD_UNCHANGED = -1, // 8bit, color or not
IMREAD_GRAYSCALE = 0, // 8bit, gray
IMREAD_COLOR = 1, // ?, color
IMREAD_ANYDEPTH = 2, // any depth, ?
IMREAD_ANYCOLOR = 4 // ?, any color
};
enum { IMWRITE_JPEG_QUALITY = 1,
IMWRITE_PNG_COMPRESSION = 16,
IMWRITE_PNG_STRATEGY = 17,
IMWRITE_PNG_BILEVEL = 18,
IMWRITE_PXM_BINARY = 32,
IMWRITE_WEBP_QUALITY = 64
};
enum { IMWRITE_PNG_STRATEGY_DEFAULT = 0,
IMWRITE_PNG_STRATEGY_FILTERED = 1,
IMWRITE_PNG_STRATEGY_HUFFMAN_ONLY = 2,
IMWRITE_PNG_STRATEGY_RLE = 3,
IMWRITE_PNG_STRATEGY_FIXED = 4
};
CV_EXPORTS_W Mat imread( const String& filename, int flags = IMREAD_COLOR );
CV_EXPORTS_W Mat imread( const String& filename, int flags=1 );
CV_EXPORTS_W bool imwrite( const String& filename, InputArray img,
const std::vector<int>& params=std::vector<int>());
const std::vector<int>& params = std::vector<int>());
CV_EXPORTS_W Mat imdecode( InputArray buf, int flags );
CV_EXPORTS Mat imdecode( InputArray buf, int flags, Mat* dst );
CV_EXPORTS Mat imdecode( InputArray buf, int flags, Mat* dst);
CV_EXPORTS_W bool imencode( const String& ext, InputArray img,
CV_OUT std::vector<uchar>& buf,
const std::vector<int>& params=std::vector<int>());
const std::vector<int>& params = std::vector<int>());
#ifndef CV_NO_VIDEO_CAPTURE_CPP_API
} // cv
////////////////////////////////// video io /////////////////////////////////
typedef struct CvCapture CvCapture;
typedef struct CvVideoWriter CvVideoWriter;
namespace cv
{
// Camera API
enum { CAP_ANY = 0, // autodetect
CAP_VFW = 200, // platform native
CAP_V4L = 200,
CAP_V4L2 = CAP_V4L,
CAP_FIREWARE = 300, // IEEE 1394 drivers
CAP_FIREWIRE = CAP_FIREWARE,
CAP_IEEE1394 = CAP_FIREWARE,
CAP_DC1394 = CAP_FIREWARE,
CAP_CMU1394 = CAP_FIREWARE,
CAP_QT = 500, // QuickTime
CAP_UNICAP = 600, // Unicap drivers
CAP_DSHOW = 700, // DirectShow (via videoInput)
CAP_PVAPI = 800, // PvAPI, Prosilica GigE SDK
CAP_OPENNI = 900, // OpenNI (for Kinect)
CAP_OPENNI_ASUS = 910, // OpenNI (for Asus Xtion)
CAP_ANDROID = 1000, // Android
CAP_XIAPI = 1100, // XIMEA Camera API
CAP_AVFOUNDATION = 1200, // AVFoundation framework for iOS (OS X Lion will have the same API)
CAP_GIGANETIX = 1300, // Smartek Giganetix GigEVisionSDK
CAP_MSMF = 1400 // Microsoft Media Foundation (via videoInput)
};
// generic properties (based on DC1394 properties)
enum { CAP_PROP_POS_MSEC =0,
CAP_PROP_POS_FRAMES =1,
CAP_PROP_POS_AVI_RATIO =2,
CAP_PROP_FRAME_WIDTH =3,
CAP_PROP_FRAME_HEIGHT =4,
CAP_PROP_FPS =5,
CAP_PROP_FOURCC =6,
CAP_PROP_FRAME_COUNT =7,
CAP_PROP_FORMAT =8,
CAP_PROP_MODE =9,
CAP_PROP_BRIGHTNESS =10,
CAP_PROP_CONTRAST =11,
CAP_PROP_SATURATION =12,
CAP_PROP_HUE =13,
CAP_PROP_GAIN =14,
CAP_PROP_EXPOSURE =15,
CAP_PROP_CONVERT_RGB =16,
CAP_PROP_WHITE_BALANCE_BLUE_U =17,
CAP_PROP_RECTIFICATION =18,
CAP_PROP_MONOCROME =19,
CAP_PROP_SHARPNESS =20,
CAP_PROP_AUTO_EXPOSURE =21, // DC1394: exposure control done by camera, user can adjust refernce level using this feature
CAP_PROP_GAMMA =22,
CAP_PROP_TEMPERATURE =23,
CAP_PROP_TRIGGER =24,
CAP_PROP_TRIGGER_DELAY =25,
CAP_PROP_WHITE_BALANCE_RED_V =26,
CAP_PROP_ZOOM =27,
CAP_PROP_FOCUS =28,
CAP_PROP_GUID =29,
CAP_PROP_ISO_SPEED =30,
CAP_PROP_BACKLIGHT =32,
CAP_PROP_PAN =33,
CAP_PROP_TILT =34,
CAP_PROP_ROLL =35,
CAP_PROP_IRIS =36,
CAP_PROP_SETTINGS =37
};
// DC1394 only
// modes of the controlling registers (can be: auto, manual, auto single push, absolute Latter allowed with any other mode)
// every feature can have only one mode turned on at a time
enum { CAP_PROP_DC1394_OFF = -4, //turn the feature off (not controlled manually nor automatically)
CAP_PROP_DC1394_MODE_MANUAL = -3, //set automatically when a value of the feature is set by the user
CAP_PROP_DC1394_MODE_AUTO = -2,
CAP_PROP_DC1394_MODE_ONE_PUSH_AUTO = -1,
CAP_PROP_DC1394_MAX = 31
};
// OpenNI map generators
enum { CAP_OPENNI_DEPTH_GENERATOR = 1 << 31,
CAP_OPENNI_IMAGE_GENERATOR = 1 << 30,
CAP_OPENNI_GENERATORS_MASK = CAP_OPENNI_DEPTH_GENERATOR + CAP_OPENNI_IMAGE_GENERATOR
};
// Properties of cameras available through OpenNI interfaces
enum { CAP_PROP_OPENNI_OUTPUT_MODE = 100,
CAP_PROP_OPENNI_FRAME_MAX_DEPTH = 101, // in mm
CAP_PROP_OPENNI_BASELINE = 102, // in mm
CAP_PROP_OPENNI_FOCAL_LENGTH = 103, // in pixels
CAP_PROP_OPENNI_REGISTRATION = 104, // flag that synchronizes the remapping depth map to image map
// by changing depth generator's view point (if the flag is "on") or
// sets this view point to its normal one (if the flag is "off").
CAP_PROP_OPENNI_REGISTRATION_ON = CAP_PROP_OPENNI_REGISTRATION,
CAP_PROP_OPENNI_APPROX_FRAME_SYNC = 105,
CAP_PROP_OPENNI_MAX_BUFFER_SIZE = 106,
CAP_PROP_OPENNI_CIRCLE_BUFFER = 107,
CAP_PROP_OPENNI_MAX_TIME_DURATION = 108,
CAP_PROP_OPENNI_GENERATOR_PRESENT = 109
};
// OpenNI shortcats
enum { CAP_OPENNI_IMAGE_GENERATOR_PRESENT = CAP_OPENNI_IMAGE_GENERATOR + CAP_PROP_OPENNI_GENERATOR_PRESENT,
CAP_OPENNI_IMAGE_GENERATOR_OUTPUT_MODE = CAP_OPENNI_IMAGE_GENERATOR + CAP_PROP_OPENNI_OUTPUT_MODE,
CAP_OPENNI_DEPTH_GENERATOR_BASELINE = CAP_OPENNI_DEPTH_GENERATOR + CAP_PROP_OPENNI_BASELINE,
CAP_OPENNI_DEPTH_GENERATOR_FOCAL_LENGTH = CAP_OPENNI_DEPTH_GENERATOR + CAP_PROP_OPENNI_FOCAL_LENGTH,
CAP_OPENNI_DEPTH_GENERATOR_REGISTRATION = CAP_OPENNI_DEPTH_GENERATOR + CAP_PROP_OPENNI_REGISTRATION,
CAP_OPENNI_DEPTH_GENERATOR_REGISTRATION_ON = CAP_OPENNI_DEPTH_GENERATOR_REGISTRATION
};
// OpenNI data given from depth generator
enum { CAP_OPENNI_DEPTH_MAP = 0, // Depth values in mm (CV_16UC1)
CAP_OPENNI_POINT_CLOUD_MAP = 1, // XYZ in meters (CV_32FC3)
CAP_OPENNI_DISPARITY_MAP = 2, // Disparity in pixels (CV_8UC1)
CAP_OPENNI_DISPARITY_MAP_32F = 3, // Disparity in pixels (CV_32FC1)
CAP_OPENNI_VALID_DEPTH_MASK = 4, // CV_8UC1
// Data given from RGB image generator
CAP_OPENNI_BGR_IMAGE = 5,
CAP_OPENNI_GRAY_IMAGE = 6
};
// Supported output modes of OpenNI image generator
enum { CAP_OPENNI_VGA_30HZ = 0,
CAP_OPENNI_SXGA_15HZ = 1,
CAP_OPENNI_SXGA_30HZ = 2,
CAP_OPENNI_QVGA_30HZ = 3,
CAP_OPENNI_QVGA_60HZ = 4
};
// GStreamer
enum { CAP_PROP_GSTREAMER_QUEUE_LENGTH = 200 // default is 1
};
// PVAPI
enum { CAP_PROP_PVAPI_MULTICASTIP = 300 // ip for anable multicast master mode. 0 for disable multicast
};
// Properties of cameras available through XIMEA SDK interface
enum { CAP_PROP_XI_DOWNSAMPLING = 400, // Change image resolution by binning or skipping.
CAP_PROP_XI_DATA_FORMAT = 401, // Output data format.
CAP_PROP_XI_OFFSET_X = 402, // Horizontal offset from the origin to the area of interest (in pixels).
CAP_PROP_XI_OFFSET_Y = 403, // Vertical offset from the origin to the area of interest (in pixels).
CAP_PROP_XI_TRG_SOURCE = 404, // Defines source of trigger.
CAP_PROP_XI_TRG_SOFTWARE = 405, // Generates an internal trigger. PRM_TRG_SOURCE must be set to TRG_SOFTWARE.
CAP_PROP_XI_GPI_SELECTOR = 406, // Selects general purpose input
CAP_PROP_XI_GPI_MODE = 407, // Set general purpose input mode
CAP_PROP_XI_GPI_LEVEL = 408, // Get general purpose level
CAP_PROP_XI_GPO_SELECTOR = 409, // Selects general purpose output
CAP_PROP_XI_GPO_MODE = 410, // Set general purpose output mode
CAP_PROP_XI_LED_SELECTOR = 411, // Selects camera signalling LED
CAP_PROP_XI_LED_MODE = 412, // Define camera signalling LED functionality
CAP_PROP_XI_MANUAL_WB = 413, // Calculates White Balance(must be called during acquisition)
CAP_PROP_XI_AUTO_WB = 414, // Automatic white balance
CAP_PROP_XI_AEAG = 415, // Automatic exposure/gain
CAP_PROP_XI_EXP_PRIORITY = 416, // Exposure priority (0.5 - exposure 50%, gain 50%).
CAP_PROP_XI_AE_MAX_LIMIT = 417, // Maximum limit of exposure in AEAG procedure
CAP_PROP_XI_AG_MAX_LIMIT = 418, // Maximum limit of gain in AEAG procedure
CAP_PROP_XI_AEAG_LEVEL = 419, // Average intensity of output signal AEAG should achieve(in %)
CAP_PROP_XI_TIMEOUT = 420 // Image capture timeout in milliseconds
};
// Properties for Android cameras
enum { CAP_PROP_ANDROID_AUTOGRAB = 1024,
CAP_PROP_ANDROID_PREVIEW_SIZES_STRING = 1025, // readonly, tricky property, returns const char* indeed
CAP_PROP_ANDROID_PREVIEW_FORMAT = 1026, // readonly, tricky property, returns const char* indeed
CAP_PROP_ANDROID_FLASH_MODE = 8001,
CAP_PROP_ANDROID_FOCUS_MODE = 8002,
CAP_PROP_ANDROID_WHITE_BALANCE = 8003,
CAP_PROP_ANDROID_ANTIBANDING = 8004,
CAP_PROP_ANDROID_FOCAL_LENGTH = 8005,
CAP_PROP_ANDROID_FOCUS_DISTANCE_NEAR = 8006,
CAP_PROP_ANDROID_FOCUS_DISTANCE_OPTIMAL = 8007,
CAP_PROP_ANDROID_FOCUS_DISTANCE_FAR = 8008
};
// Android camera output formats
enum { CAP_ANDROID_COLOR_FRAME_BGR = 0, //BGR
CAP_ANDROID_COLOR_FRAME = CAP_ANDROID_COLOR_FRAME_BGR,
CAP_ANDROID_GREY_FRAME = 1, //Y
CAP_ANDROID_COLOR_FRAME_RGB = 2,
CAP_ANDROID_COLOR_FRAME_BGRA = 3,
CAP_ANDROID_COLOR_FRAME_RGBA = 4
};
// Android camera flash modes
enum { CAP_ANDROID_FLASH_MODE_AUTO = 0,
CAP_ANDROID_FLASH_MODE_OFF = 1,
CAP_ANDROID_FLASH_MODE_ON = 2,
CAP_ANDROID_FLASH_MODE_RED_EYE = 3,
CAP_ANDROID_FLASH_MODE_TORCH = 4
};
// Android camera focus modes
enum { CAP_ANDROID_FOCUS_MODE_AUTO = 0,
CAP_ANDROID_FOCUS_MODE_CONTINUOUS_VIDEO = 1,
CAP_ANDROID_FOCUS_MODE_EDOF = 2,
CAP_ANDROID_FOCUS_MODE_FIXED = 3,
CAP_ANDROID_FOCUS_MODE_INFINITY = 4,
CAP_ANDROID_FOCUS_MODE_MACRO = 5
};
// Android camera white balance modes
enum { CAP_ANDROID_WHITE_BALANCE_AUTO = 0,
CAP_ANDROID_WHITE_BALANCE_CLOUDY_DAYLIGHT = 1,
CAP_ANDROID_WHITE_BALANCE_DAYLIGHT = 2,
CAP_ANDROID_WHITE_BALANCE_FLUORESCENT = 3,
CAP_ANDROID_WHITE_BALANCE_INCANDESCENT = 4,
CAP_ANDROID_WHITE_BALANCE_SHADE = 5,
CAP_ANDROID_WHITE_BALANCE_TWILIGHT = 6,
CAP_ANDROID_WHITE_BALANCE_WARM_FLUORESCENT = 7
};
// Android camera antibanding modes
enum { CAP_ANDROID_ANTIBANDING_50HZ = 0,
CAP_ANDROID_ANTIBANDING_60HZ = 1,
CAP_ANDROID_ANTIBANDING_AUTO = 2,
CAP_ANDROID_ANTIBANDING_OFF = 3
};
// Properties of cameras available through AVFOUNDATION interface
enum { CAP_PROP_IOS_DEVICE_FOCUS = 9001,
CAP_PROP_IOS_DEVICE_EXPOSURE = 9002,
CAP_PROP_IOS_DEVICE_FLASH = 9003,
CAP_PROP_IOS_DEVICE_WHITEBALANCE = 9004,
CAP_PROP_IOS_DEVICE_TORCH = 9005
};
// Properties of cameras available through Smartek Giganetix Ethernet Vision interface
/* --- Vladimir Litvinenko (litvinenko.vladimir@gmail.com) --- */
enum { CAP_PROP_GIGA_FRAME_OFFSET_X = 10001,
CAP_PROP_GIGA_FRAME_OFFSET_Y = 10002,
CAP_PROP_GIGA_FRAME_WIDTH_MAX = 10003,
CAP_PROP_GIGA_FRAME_HEIGH_MAX = 10004,
CAP_PROP_GIGA_FRAME_SENS_WIDTH = 10005,
CAP_PROP_GIGA_FRAME_SENS_HEIGH = 10006
};
template<> void CV_EXPORTS Ptr<CvCapture>::delete_obj();
template<> void CV_EXPORTS Ptr<CvVideoWriter>::delete_obj();
class CV_EXPORTS_W VideoCapture
{
@@ -210,7 +509,7 @@ public:
CV_WRAP virtual void release();
CV_WRAP virtual bool grab();
CV_WRAP virtual bool retrieve(CV_OUT Mat& image, int channel=0);
CV_WRAP virtual bool retrieve(CV_OUT Mat& image, int flag = 0);
virtual VideoCapture& operator >> (CV_OUT Mat& image);
CV_WRAP virtual bool read(CV_OUT Mat& image);
@@ -227,24 +526,25 @@ class CV_EXPORTS_W VideoWriter
public:
CV_WRAP VideoWriter();
CV_WRAP VideoWriter(const String& filename, int fourcc, double fps,
Size frameSize, bool isColor=true);
Size frameSize, bool isColor = true);
virtual ~VideoWriter();
CV_WRAP virtual bool open(const String& filename, int fourcc, double fps,
Size frameSize, bool isColor=true);
Size frameSize, bool isColor = true);
CV_WRAP virtual bool isOpened() const;
CV_WRAP virtual void release();
virtual VideoWriter& operator << (const Mat& image);
CV_WRAP virtual void write(const Mat& image);
CV_WRAP static int fourcc(char c1, char c2, char c3, char c4);
protected:
Ptr<CvVideoWriter> writer;
};
#endif
template<> void Ptr<CvCapture>::delete_obj();
template<> void Ptr<CvVideoWriter>::delete_obj();
}
} // cv
#endif
#endif
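A small usage sketch (illustration only; the file name is a placeholder) of the header after the split: the IMREAD_*, WINDOW_* and CAP_* values above are plain C++ enums in namespace cv, and highgui_c.h is no longer pulled in implicitly.

    #include "opencv2/highgui.hpp"

    int main()
    {
        // IMREAD_GRAYSCALE instead of the old magic number 0 / CV_LOAD_IMAGE_GRAYSCALE.
        cv::Mat gray = cv::imread("input.png", cv::IMREAD_GRAYSCALE);
        if (gray.empty())
            return 1;

        cv::namedWindow("preview", cv::WINDOW_AUTOSIZE);
        cv::imshow("preview", gray);
        cv::waitKey(0);
        return 0;
    }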

View File

@@ -570,9 +570,6 @@ CVAPI(CvVideoWriter*) cvCreateVideoWriter( const char* filename, int fourcc,
double fps, CvSize frame_size,
int is_color CV_DEFAULT(1));
//CVAPI(CvVideoWriter*) cvCreateImageSequenceWriter( const char* filename,
// int is_color CV_DEFAULT(1));
/* write frame to video file */
CVAPI(int) cvWriteFrame( CvVideoWriter* writer, const IplImage* image );

View File

@@ -23,7 +23,7 @@ PERF_TEST_P(VideoWriter_Writing, WriteFrame,
string filename = getDataPath(get<0>(GetParam()));
bool isColor = get<1>(GetParam());
VideoWriter writer(cv::tempfile(".avi"), CV_FOURCC('X', 'V', 'I', 'D'), 25, cv::Size(640, 480), isColor);
VideoWriter writer(cv::tempfile(".avi"), VideoWriter::fourcc('X', 'V', 'I', 'D'), 25, cv::Size(640, 480), isColor);
TEST_CYCLE() { Mat image = imread(filename, 1); writer << image; }

View File

@@ -540,9 +540,9 @@ double VideoCapture::get(int propId)
VideoWriter::VideoWriter()
{}
VideoWriter::VideoWriter(const String& filename, int fourcc, double fps, Size frameSize, bool isColor)
VideoWriter::VideoWriter(const String& filename, int _fourcc, double fps, Size frameSize, bool isColor)
{
open(filename, fourcc, fps, frameSize, isColor);
open(filename, _fourcc, fps, frameSize, isColor);
}
void VideoWriter::release()
@@ -555,9 +555,9 @@ VideoWriter::~VideoWriter()
release();
}
bool VideoWriter::open(const String& filename, int fourcc, double fps, Size frameSize, bool isColor)
bool VideoWriter::open(const String& filename, int _fourcc, double fps, Size frameSize, bool isColor)
{
writer = cvCreateVideoWriter(filename.c_str(), fourcc, fps, frameSize, isColor);
writer = cvCreateVideoWriter(filename.c_str(), _fourcc, fps, frameSize, isColor);
return isOpened();
}
@@ -578,4 +578,9 @@ VideoWriter& VideoWriter::operator << (const Mat& image)
return *this;
}
int VideoWriter::fourcc(char c1, char c2, char c3, char c4)
{
return (c1 & 255) + ((c2 & 255) << 8) + ((c3 & 255) << 16) + ((c4 & 255) << 24);
}
}
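The new static VideoWriter::fourcc above packs the four characters little-endian, exactly like the old CV_FOURCC macro. Illustration only; the output path is a placeholder:

    #include "opencv2/highgui.hpp"
    #include <cstdio>

    int main()
    {
        int tag = cv::VideoWriter::fourcc('X', 'V', 'I', 'D');   // == CV_FOURCC('X','V','I','D')
        std::printf("XVID tag = 0x%08x\n", tag);

        cv::VideoWriter writer("out.avi", tag, 25.0, cv::Size(640, 480), true);
        if (!writer.isOpened())
            return 1;

        cv::Mat frame(480, 640, CV_8UC3, cv::Scalar::all(0));
        for (int i = 0; i < 25; ++i)
            writer << frame;   // one second of black frames
        return 0;
    }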

View File

@@ -41,7 +41,7 @@
#include "precomp.hpp"
#ifdef HAVE_FFMPEG
#if defined HAVE_FFMPEG && !defined WIN32
#include "cap_ffmpeg_impl.hpp"
#else
#include "cap_ffmpeg_api.hpp"

View File

@@ -61,7 +61,11 @@ extern "C" {
#endif
#ifdef WIN32
#include <libavformat/avformat.h>
# ifdef __OPENCV_BUILD
# define AVUTIL_COMMON_H
# define MKBETAG(a,b,c,d) ((d) | ((c) << 8) | ((b) << 16) | ((unsigned)(a) << 24))
# endif
# include <libavformat/avformat.h>
#else
// if the header path is not specified explicitly, let's deduce it

View File

@@ -199,41 +199,4 @@ void cvSetRatioWindow_QT(const char* name,double prop_value);
double cvGetOpenGlProp_QT(const char* name);
#endif
/*namespace cv
{
class CV_EXPORTS BaseWindow
{
public:
BaseWindow(const String& name, int flags=0);
virtual ~BaseWindow();
virtual void close();
virtual void show(const Mat& mat);
virtual void resize(Size size);
virtual void move(Point topleft);
virtual Size size() const;
virtual Point topLeft() const;
virtual void setGeometry(Point topLeft, Size size);
virtual void getGeometry(Point& topLeft, Size& size) const;
virtual String getTitle() const;
virtual void setTitle(const String& str);
virtual String getName() const;
virtual void setScaleMode(int mode);
virtual int getScaleMode();
virtual void setScrollPos(double pos);
virtual double getScrollPos() const;
virtual void setScale(double scale);
virtual double getScale() const;
virtual Point getImageCoords(Point pos) const;
virtual Scalar getPixelValue(Point pos, const String& colorspace=String()) const;
virtual void addTrackbar( const String& trackbar, int low, int high, int step );
};
typedef Ptr<BaseWindow> Window;
}*/
#endif /* __HIGHGUI_H_ */ #endif /* __HIGHGUI_H_ */

View File

@ -342,15 +342,16 @@ CV_IMPL void cvUpdateWindow(const char*)
#if defined (HAVE_QT) #if defined (HAVE_QT)
CvFont cv::fontQt(const String& nameFont, int pointSize, Scalar color, int weight, int style, int /*spacing*/) cv::QtFont cv::fontQt(const String& nameFont, int pointSize, Scalar color, int weight, int style, int /*spacing*/)
{ {
return cvFontQt(nameFont.c_str(), pointSize,color,weight, style); CvFont f = cvFontQt(nameFont.c_str(), pointSize,color,weight, style);
return *(cv::QtFont*)(&f);
} }
void cv::addText( const Mat& img, const String& text, Point org, CvFont font) void cv::addText( const Mat& img, const String& text, Point org, const QtFont& font)
{ {
CvMat _img = img; CvMat _img = img;
cvAddText( &_img, text.c_str(), org,&font); cvAddText( &_img, text.c_str(), org, (CvFont*)&font);
} }
void cv::displayStatusBar(const String& name, const String& text, int delayms) void cv::displayStatusBar(const String& name, const String& text, int delayms)
@ -390,13 +391,13 @@ int cv::createButton(const String& button_name, ButtonCallback on_change, void*
#else #else
CvFont cv::fontQt(const String&, int, Scalar, int, int, int) cv::QtFont cv::fontQt(const String&, int, Scalar, int, int, int)
{ {
CV_Error(CV_StsNotImplemented, "The library is compiled without QT support"); CV_Error(CV_StsNotImplemented, "The library is compiled without QT support");
return CvFont(); return QtFont();
} }
void cv::addText( const Mat&, const String&, Point, CvFont) void cv::addText( const Mat&, const String&, Point, const QtFont&)
{ {
CV_Error(CV_StsNotImplemented, "The library is compiled without QT support"); CV_Error(CV_StsNotImplemented, "The library is compiled without QT support");
} }

View File

@ -95,15 +95,15 @@ public:
double fps = fps0; double fps = fps0;
Size frame_s = Size(img_c, img_r); Size frame_s = Size(img_c, img_r);
if( tag == CV_FOURCC('H', '2', '6', '1') ) if( tag == VideoWriter::fourcc('H', '2', '6', '1') )
frame_s = Size(352, 288); frame_s = Size(352, 288);
else if( tag == CV_FOURCC('H', '2', '6', '3') ) else if( tag == VideoWriter::fourcc('H', '2', '6', '3') )
frame_s = Size(704, 576); frame_s = Size(704, 576);
/*else if( tag == CV_FOURCC('M', 'J', 'P', 'G') || /*else if( tag == CV_FOURCC('M', 'J', 'P', 'G') ||
tag == CV_FOURCC('j', 'p', 'e', 'g') ) tag == CV_FOURCC('j', 'p', 'e', 'g') )
frame_s = Size(1920, 1080);*/ frame_s = Size(1920, 1080);*/
if( tag == CV_FOURCC('M', 'P', 'E', 'G') ) if( tag == VideoWriter::fourcc('M', 'P', 'E', 'G') )
fps = 25; fps = 25;
VideoWriter writer(filename, tag, fps, frame_s); VideoWriter writer(filename, tag, fps, frame_s);
@ -201,7 +201,7 @@ public:
std::string fileName = tempfile(stream.str().c_str()); std::string fileName = tempfile(stream.str().c_str());
files->operator[](i) = fileName; files->operator[](i) = fileName;
writers->operator[](i) = new VideoWriter(fileName, CV_FOURCC('X','V','I','D'), 25.0f, FrameSize); writers->operator[](i) = new VideoWriter(fileName, VideoWriter::fourcc('X','V','I','D'), 25.0f, FrameSize);
CV_Assert(writers->operator[](i)->isOpened()); CV_Assert(writers->operator[](i)->isOpened());
} }
@ -311,7 +311,7 @@ public:
CV_Assert(capture->isOpened()); CV_Assert(capture->isOpened());
const static double eps = 23.0; const static double eps = 23.0;
unsigned int frameCount = static_cast<unsigned int>(capture->get(CV_CAP_PROP_FRAME_COUNT)); unsigned int frameCount = static_cast<unsigned int>(capture->get(CAP_PROP_FRAME_COUNT));
CV_Assert(frameCount == WriteVideo_Invoker::FrameCount); CV_Assert(frameCount == WriteVideo_Invoker::FrameCount);
Mat reference(CreateVideoWriterInvoker::FrameSize, CV_8UC3); Mat reference(CreateVideoWriterInvoker::FrameSize, CV_8UC3);

View File

@ -41,7 +41,7 @@
//M*/ //M*/
#include "test_precomp.hpp" #include "test_precomp.hpp"
#include "opencv2/highgui.hpp" #include "opencv2/highgui/highgui_c.h"
#include <stdio.h> #include <stdio.h>
using namespace cv; using namespace cv;
@ -91,7 +91,7 @@ void CV_FramecountTest::run(int)
FrameCount++; FrameCount++;
} }
int framecount = (int)cvGetCaptureProperty(cap, CV_CAP_PROP_FRAME_COUNT); int framecount = (int)cvGetCaptureProperty(cap, CAP_PROP_FRAME_COUNT);
ts->printf(cvtest::TS::LOG, "\nFile information (video %d): \n"\ ts->printf(cvtest::TS::LOG, "\nFile information (video %d): \n"\
"\nName: big_buck_bunny.%s\nActual frame count: %d\n"\ "\nName: big_buck_bunny.%s\nActual frame count: %d\n"\

View File

@ -113,7 +113,7 @@ public:
imwrite(img_path, img); imwrite(img_path, img);
ts->printf(ts->LOG, "reading test image : %s\n", img_path.c_str()); ts->printf(ts->LOG, "reading test image : %s\n", img_path.c_str());
Mat img_test = imread(img_path, CV_LOAD_IMAGE_UNCHANGED); Mat img_test = imread(img_path, IMREAD_UNCHANGED);
if (img_test.empty()) ts->set_failed_test_info(ts->FAIL_MISMATCH); if (img_test.empty()) ts->set_failed_test_info(ts->FAIL_MISMATCH);
@ -140,11 +140,11 @@ public:
string filename = cv::tempfile(".jpg"); string filename = cv::tempfile(".jpg");
imwrite(filename, img); imwrite(filename, img);
img = imread(filename, CV_LOAD_IMAGE_UNCHANGED); img = imread(filename, IMREAD_UNCHANGED);
filename = string(ts->get_data_path() + "readwrite/test_" + char(k + 48) + "_c" + char(num_channels + 48) + ".jpg"); filename = string(ts->get_data_path() + "readwrite/test_" + char(k + 48) + "_c" + char(num_channels + 48) + ".jpg");
ts->printf(ts->LOG, "reading test image : %s\n", filename.c_str()); ts->printf(ts->LOG, "reading test image : %s\n", filename.c_str());
Mat img_test = imread(filename, CV_LOAD_IMAGE_UNCHANGED); Mat img_test = imread(filename, IMREAD_UNCHANGED);
if (img_test.empty()) ts->set_failed_test_info(ts->FAIL_MISMATCH); if (img_test.empty()) ts->set_failed_test_info(ts->FAIL_MISMATCH);
@ -171,7 +171,7 @@ public:
string filename = cv::tempfile(".tiff"); string filename = cv::tempfile(".tiff");
imwrite(filename, img); imwrite(filename, img);
ts->printf(ts->LOG, "reading test image : %s\n", filename.c_str()); ts->printf(ts->LOG, "reading test image : %s\n", filename.c_str());
Mat img_test = imread(filename, CV_LOAD_IMAGE_UNCHANGED); Mat img_test = imread(filename, IMREAD_UNCHANGED);
if (img_test.empty()) ts->set_failed_test_info(ts->FAIL_MISMATCH); if (img_test.empty()) ts->set_failed_test_info(ts->FAIL_MISMATCH);
@ -242,12 +242,12 @@ public:
Mat im = Mat::zeros(1000,1000, CV_8U); Mat im = Mat::zeros(1000,1000, CV_8U);
//randu(im, 0, 256); //randu(im, 0, 256);
vector<int> param; vector<int> param;
param.push_back(CV_IMWRITE_PNG_COMPRESSION); param.push_back(IMWRITE_PNG_COMPRESSION);
param.push_back(3); //default(3) 0-9. param.push_back(3); //default(3) 0-9.
cv::imencode(".png" ,im ,buff, param); cv::imencode(".png" ,im ,buff, param);
// hangs // hangs
Mat im2 = imdecode(buff,CV_LOAD_IMAGE_ANYDEPTH); Mat im2 = imdecode(buff,IMREAD_ANYDEPTH);
} }
catch(...) catch(...)
{ {
@ -375,7 +375,7 @@ TEST(Highgui_WebP, encode_decode_lossless_webp)
remove(output.c_str()); remove(output.c_str());
cv::Mat decode = cv::imdecode(buf, CV_LOAD_IMAGE_COLOR); cv::Mat decode = cv::imdecode(buf, IMREAD_COLOR);
ASSERT_FALSE(decode.empty()); ASSERT_FALSE(decode.empty());
EXPECT_TRUE(cv::norm(decode, img_webp, NORM_INF) == 0); EXPECT_TRUE(cv::norm(decode, img_webp, NORM_INF) == 0);
@ -394,7 +394,7 @@ TEST(Highgui_WebP, encode_decode_lossy_webp)
for(int q = 100; q>=0; q-=10) for(int q = 100; q>=0; q-=10)
{ {
std::vector<int> params; std::vector<int> params;
params.push_back(CV_IMWRITE_WEBP_QUALITY); params.push_back(IMWRITE_WEBP_QUALITY);
params.push_back(q); params.push_back(q);
string output = cv::tempfile(".webp"); string output = cv::tempfile(".webp");

View File

@ -59,7 +59,7 @@ void Foo(int /*k*/, void* /*z*/) {}
void CV_HighGuiOnlyGuiTest::run( int /*start_from */) void CV_HighGuiOnlyGuiTest::run( int /*start_from */)
{ {
ts->printf(ts->LOG, "GUI 0\n"); ts->printf(ts->LOG, "GUI 0\n");
cvDestroyAllWindows(); destroyAllWindows();
ts->printf(ts->LOG, "GUI 1\n"); ts->printf(ts->LOG, "GUI 1\n");
namedWindow("Win"); namedWindow("Win");
@ -84,7 +84,7 @@ void CV_HighGuiOnlyGuiTest::run( int /*start_from */)
waitKey(500); waitKey(500);
ts->printf(ts->LOG, "GUI 8\n"); ts->printf(ts->LOG, "GUI 8\n");
cvDestroyAllWindows(); destroyAllWindows();
ts->set_failed_test_info(cvtest::TS::OK); ts->set_failed_test_info(cvtest::TS::OK);
} }

View File

@ -41,7 +41,7 @@
//M*/ //M*/
#include "test_precomp.hpp" #include "test_precomp.hpp"
#include "opencv2/highgui.hpp" #include "opencv2/highgui/highgui_c.h"
#include <stdio.h> #include <stdio.h>
using namespace cv; using namespace cv;
@ -88,7 +88,7 @@ CV_VideoRandomPositioningTest::~CV_VideoRandomPositioningTest() {}
void CV_VideoPositioningTest::generate_idx_seq(CvCapture* cap, int method) void CV_VideoPositioningTest::generate_idx_seq(CvCapture* cap, int method)
{ {
idx.clear(); idx.clear();
int N = (int)cvGetCaptureProperty(cap, CV_CAP_PROP_FRAME_COUNT); int N = (int)cvGetCaptureProperty(cap, CAP_PROP_FRAME_COUNT);
switch(method) switch(method)
{ {
case PROGRESSIVE: case PROGRESSIVE:
@ -147,7 +147,7 @@ void CV_VideoPositioningTest::run_test(int method)
failed_videos++; continue; failed_videos++; continue;
} }
cvSetCaptureProperty(cap, CV_CAP_PROP_POS_FRAMES, 0); cvSetCaptureProperty(cap, CAP_PROP_POS_FRAMES, 0);
generate_idx_seq(cap, method); generate_idx_seq(cap, method);
@ -157,7 +157,7 @@ void CV_VideoPositioningTest::run_test(int method)
{ {
bool flag = false; bool flag = false;
cvSetCaptureProperty(cap, CV_CAP_PROP_POS_FRAMES, idx.at(j)); cvSetCaptureProperty(cap, CAP_PROP_POS_FRAMES, idx.at(j));
/* IplImage* frame = cvRetrieveFrame(cap); /* IplImage* frame = cvRetrieveFrame(cap);
@ -173,7 +173,7 @@ void CV_VideoPositioningTest::run_test(int method)
flag = !flag; flag = !flag;
} */ } */
int val = (int)cvGetCaptureProperty(cap, CV_CAP_PROP_POS_FRAMES); int val = (int)cvGetCaptureProperty(cap, CAP_PROP_POS_FRAMES);
if (idx.at(j) != val) if (idx.at(j) != val)
{ {

View File

@ -12,6 +12,7 @@
#include <iostream> #include <iostream>
#include "opencv2/ts.hpp" #include "opencv2/ts.hpp"
#include "opencv2/imgproc.hpp" #include "opencv2/imgproc.hpp"
#include "opencv2/highgui.hpp"
#include "opencv2/imgproc/imgproc_c.h" #include "opencv2/imgproc/imgproc_c.h"
#include "opencv2/core/private.hpp" #include "opencv2/core/private.hpp"

View File

@ -41,7 +41,7 @@
//M*/ //M*/
#include "test_precomp.hpp" #include "test_precomp.hpp"
#include "opencv2/highgui.hpp" #include "opencv2/highgui/highgui_c.h"
using namespace cv; using namespace cv;
using namespace std; using namespace std;
@ -56,15 +56,15 @@ string fourccToString(int fourcc)
const VideoFormat g_specific_fmt_list[] = const VideoFormat g_specific_fmt_list[] =
{ {
VideoFormat("avi", CV_FOURCC('X', 'V', 'I', 'D')), VideoFormat("avi", VideoWriter::fourcc('X', 'V', 'I', 'D')),
VideoFormat("avi", CV_FOURCC('M', 'P', 'E', 'G')), VideoFormat("avi", VideoWriter::fourcc('M', 'P', 'E', 'G')),
VideoFormat("avi", CV_FOURCC('M', 'J', 'P', 'G')), VideoFormat("avi", VideoWriter::fourcc('M', 'J', 'P', 'G')),
//VideoFormat("avi", CV_FOURCC('I', 'Y', 'U', 'V')), //VideoFormat("avi", VideoWriter::fourcc('I', 'Y', 'U', 'V')),
VideoFormat("mkv", CV_FOURCC('X', 'V', 'I', 'D')), VideoFormat("mkv", VideoWriter::fourcc('X', 'V', 'I', 'D')),
VideoFormat("mkv", CV_FOURCC('M', 'P', 'E', 'G')), VideoFormat("mkv", VideoWriter::fourcc('M', 'P', 'E', 'G')),
VideoFormat("mkv", CV_FOURCC('M', 'J', 'P', 'G')), VideoFormat("mkv", VideoWriter::fourcc('M', 'J', 'P', 'G')),
VideoFormat("mov", CV_FOURCC('m', 'p', '4', 'v')), VideoFormat("mov", VideoWriter::fourcc('m', 'p', '4', 'v')),
VideoFormat() VideoFormat()
}; };
@ -416,7 +416,7 @@ void CV_HighGuiTest::SpecificVideoTest(const string& dir, const cvtest::VideoFor
for( size_t i = 0; i < IMAGE_COUNT; ++i ) for( size_t i = 0; i < IMAGE_COUNT; ++i )
{ {
string file_path = format("%s../python/images/QCIF_%02d.bmp", dir.c_str(), i); string file_path = format("%s../python/images/QCIF_%02d.bmp", dir.c_str(), i);
Mat img = imread(file_path, CV_LOAD_IMAGE_COLOR); Mat img = imread(file_path, IMREAD_COLOR);
if (img.empty()) if (img.empty())
{ {
@ -442,7 +442,7 @@ void CV_HighGuiTest::SpecificVideoTest(const string& dir, const cvtest::VideoFor
writer.release(); writer.release();
VideoCapture cap(video_file); VideoCapture cap(video_file);
size_t FRAME_COUNT = (size_t)cap.get(CV_CAP_PROP_FRAME_COUNT); size_t FRAME_COUNT = (size_t)cap.get(CAP_PROP_FRAME_COUNT);
if (FRAME_COUNT != IMAGE_COUNT ) if (FRAME_COUNT != IMAGE_COUNT )
{ {

View File

@ -110,9 +110,9 @@ public:
return; return;
} }
int N0 = (int)cap.get(CV_CAP_PROP_FRAME_COUNT); int N0 = (int)cap.get(CAP_PROP_FRAME_COUNT);
cap.set(CV_CAP_PROP_POS_FRAMES, 0); cap.set(CAP_PROP_POS_FRAMES, 0);
int N = (int)cap.get(CV_CAP_PROP_FRAME_COUNT); int N = (int)cap.get(CAP_PROP_FRAME_COUNT);
if (N != n_frames || N != N0) if (N != n_frames || N != N0)
{ {
@ -125,14 +125,14 @@ public:
{ {
int idx = theRNG().uniform(0, N); int idx = theRNG().uniform(0, N);
if( !cap.set(CV_CAP_PROP_POS_FRAMES, idx) ) if( !cap.set(CAP_PROP_POS_FRAMES, idx) )
{ {
ts->printf(ts->LOG, "\nError: cannot seek to frame %d.\n", idx); ts->printf(ts->LOG, "\nError: cannot seek to frame %d.\n", idx);
ts->set_failed_test_info(ts->FAIL_INVALID_OUTPUT); ts->set_failed_test_info(ts->FAIL_INVALID_OUTPUT);
return; return;
} }
int idx1 = (int)cap.get(CV_CAP_PROP_POS_FRAMES); int idx1 = (int)cap.get(CAP_PROP_POS_FRAMES);
Mat img; cap >> img; Mat img; cap >> img;
Mat img0 = drawFrame(idx); Mat img0 = drawFrame(idx);

View File

@ -1356,7 +1356,7 @@ pyrMeanShiftFiltering
--------------------- ---------------------
Performs initial step of meanshift segmentation of an image. Performs initial step of meanshift segmentation of an image.
.. ocv:function:: void pyrMeanShiftFiltering( InputArray src, OutputArray dst, double sp, double sr, int maxLevel=1, TermCriteria termcrit=TermCriteria( TermCriteria::MAX_ITER+TermCriteria::EPS,5,1) ) .. ocv:function:: void pyrMeanShiftFiltering( InputArray src, OutputArray dst, double sp, double sr, int maxLevel=1, TermCriteria termcrit=TermCriteria(TermCriteria::MAX_ITER+TermCriteria::EPS,5,1) )
.. ocv:pyfunction:: cv2.pyrMeanShiftFiltering(src, sp, sr[, dst[, maxLevel[, termcrit]]]) -> dst .. ocv:pyfunction:: cv2.pyrMeanShiftFiltering(src, sp, sr[, dst[, maxLevel[, termcrit]]]) -> dst
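A hedged usage sketch of the documented call, matching the C++ signature above (the input path and the sp/sr values are illustrative):

    cv::Mat src = cv::imread("input.png", cv::IMREAD_COLOR);   // 8-bit, 3-channel input expected
    cv::Mat dst;
    cv::pyrMeanShiftFiltering(src, dst, 20 /*sp*/, 40 /*sr*/, 1,
        cv::TermCriteria(cv::TermCriteria::MAX_ITER + cv::TermCriteria::EPS, 5, 1));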

File diff suppressed because it is too large

View File

@ -45,6 +45,10 @@
#include "opencv2/core/core_c.h" #include "opencv2/core/core_c.h"
#ifdef __cplusplus
# include "opencv2/imgproc.hpp"
#endif
#ifdef __cplusplus #ifdef __cplusplus
extern "C" { extern "C" {
#endif #endif
@ -383,6 +387,24 @@ typedef struct CvMoments
double m00, m10, m01, m20, m11, m02, m30, m21, m12, m03; /* spatial moments */ double m00, m10, m01, m20, m11, m02, m30, m21, m12, m03; /* spatial moments */
double mu20, mu11, mu02, mu30, mu21, mu12, mu03; /* central moments */ double mu20, mu11, mu02, mu30, mu21, mu12, mu03; /* central moments */
double inv_sqrt_m00; /* m00 != 0 ? 1/sqrt(m00) : 0 */ double inv_sqrt_m00; /* m00 != 0 ? 1/sqrt(m00) : 0 */
#ifdef __cplusplus
CvMoments(){}
CvMoments(const cv::Moments& m)
{
m00 = m.m00; m10 = m.m10; m01 = m.m01;
m20 = m.m20; m11 = m.m11; m02 = m.m02;
m30 = m.m30; m21 = m.m21; m12 = m.m12; m03 = m.m03;
mu20 = m.mu20; mu11 = m.mu11; mu02 = m.mu02;
mu30 = m.mu30; mu21 = m.mu21; mu12 = m.mu12; mu03 = m.mu03;
double am00 = std::abs(m.m00);
inv_sqrt_m00 = am00 > DBL_EPSILON ? 1./std::sqrt(am00) : 0;
}
operator cv::Moments() const
{
return cv::Moments(m00, m10, m01, m20, m11, m02, m30, m21, m12, m03);
}
#endif
} }
CvMoments; CvMoments;
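The C++ members added above give CvMoments implicit conversions to and from cv::Moments. A minimal round-trip sketch (the test image is illustrative):

    cv::Mat img = cv::Mat::zeros(64, 64, CV_8U);
    img(cv::Rect(16, 16, 32, 32)) = 255;            // simple square blob
    cv::Moments m  = cv::moments(img, true);        // C++ moments
    CvMoments   cm = m;                             // C++ -> C struct via the new constructor
    cv::Moments m2 = cm;                            // C -> C++ via the new conversion operator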

View File

@ -9,114 +9,114 @@ using std::tr1::get;
//extra color conversions supported implicitly //extra color conversions supported implicitly
enum enum
{ {
CX_BGRA2HLS = CV_COLORCVT_MAX + CV_BGR2HLS, CX_BGRA2HLS = COLOR_COLORCVT_MAX + COLOR_BGR2HLS,
CX_BGRA2HLS_FULL = CV_COLORCVT_MAX + CV_BGR2HLS_FULL, CX_BGRA2HLS_FULL = COLOR_COLORCVT_MAX + COLOR_BGR2HLS_FULL,
CX_BGRA2HSV = CV_COLORCVT_MAX + CV_BGR2HSV, CX_BGRA2HSV = COLOR_COLORCVT_MAX + COLOR_BGR2HSV,
CX_BGRA2HSV_FULL = CV_COLORCVT_MAX + CV_BGR2HSV_FULL, CX_BGRA2HSV_FULL = COLOR_COLORCVT_MAX + COLOR_BGR2HSV_FULL,
CX_BGRA2Lab = CV_COLORCVT_MAX + CV_BGR2Lab, CX_BGRA2Lab = COLOR_COLORCVT_MAX + COLOR_BGR2Lab,
CX_BGRA2Luv = CV_COLORCVT_MAX + CV_BGR2Luv, CX_BGRA2Luv = COLOR_COLORCVT_MAX + COLOR_BGR2Luv,
CX_BGRA2XYZ = CV_COLORCVT_MAX + CV_BGR2XYZ, CX_BGRA2XYZ = COLOR_COLORCVT_MAX + COLOR_BGR2XYZ,
CX_BGRA2YCrCb = CV_COLORCVT_MAX + CV_BGR2YCrCb, CX_BGRA2YCrCb = COLOR_COLORCVT_MAX + COLOR_BGR2YCrCb,
CX_BGRA2YUV = CV_COLORCVT_MAX + CV_BGR2YUV, CX_BGRA2YUV = COLOR_COLORCVT_MAX + COLOR_BGR2YUV,
CX_HLS2BGRA = CV_COLORCVT_MAX + CV_HLS2BGR, CX_HLS2BGRA = COLOR_COLORCVT_MAX + COLOR_HLS2BGR,
CX_HLS2BGRA_FULL = CV_COLORCVT_MAX + CV_HLS2BGR_FULL, CX_HLS2BGRA_FULL = COLOR_COLORCVT_MAX + COLOR_HLS2BGR_FULL,
CX_HLS2RGBA = CV_COLORCVT_MAX + CV_HLS2RGB, CX_HLS2RGBA = COLOR_COLORCVT_MAX + COLOR_HLS2RGB,
CX_HLS2RGBA_FULL = CV_COLORCVT_MAX + CV_HLS2RGB_FULL, CX_HLS2RGBA_FULL = COLOR_COLORCVT_MAX + COLOR_HLS2RGB_FULL,
CX_HSV2BGRA = CV_COLORCVT_MAX + CV_HSV2BGR, CX_HSV2BGRA = COLOR_COLORCVT_MAX + COLOR_HSV2BGR,
CX_HSV2BGRA_FULL = CV_COLORCVT_MAX + CV_HSV2BGR_FULL, CX_HSV2BGRA_FULL = COLOR_COLORCVT_MAX + COLOR_HSV2BGR_FULL,
CX_HSV2RGBA = CV_COLORCVT_MAX + CV_HSV2RGB, CX_HSV2RGBA = COLOR_COLORCVT_MAX + COLOR_HSV2RGB,
CX_HSV2RGBA_FULL = CV_COLORCVT_MAX + CV_HSV2RGB_FULL, CX_HSV2RGBA_FULL = COLOR_COLORCVT_MAX + COLOR_HSV2RGB_FULL,
CX_Lab2BGRA = CV_COLORCVT_MAX + CV_Lab2BGR, CX_Lab2BGRA = COLOR_COLORCVT_MAX + COLOR_Lab2BGR,
CX_Lab2LBGRA = CV_COLORCVT_MAX + CV_Lab2LBGR, CX_Lab2LBGRA = COLOR_COLORCVT_MAX + COLOR_Lab2LBGR,
CX_Lab2LRGBA = CV_COLORCVT_MAX + CV_Lab2LRGB, CX_Lab2LRGBA = COLOR_COLORCVT_MAX + COLOR_Lab2LRGB,
CX_Lab2RGBA = CV_COLORCVT_MAX + CV_Lab2RGB, CX_Lab2RGBA = COLOR_COLORCVT_MAX + COLOR_Lab2RGB,
CX_LBGRA2Lab = CV_COLORCVT_MAX + CV_LBGR2Lab, CX_LBGRA2Lab = COLOR_COLORCVT_MAX + COLOR_LBGR2Lab,
CX_LBGRA2Luv = CV_COLORCVT_MAX + CV_LBGR2Luv, CX_LBGRA2Luv = COLOR_COLORCVT_MAX + COLOR_LBGR2Luv,
CX_LRGBA2Lab = CV_COLORCVT_MAX + CV_LRGB2Lab, CX_LRGBA2Lab = COLOR_COLORCVT_MAX + COLOR_LRGB2Lab,
CX_LRGBA2Luv = CV_COLORCVT_MAX + CV_LRGB2Luv, CX_LRGBA2Luv = COLOR_COLORCVT_MAX + COLOR_LRGB2Luv,
CX_Luv2BGRA = CV_COLORCVT_MAX + CV_Luv2BGR, CX_Luv2BGRA = COLOR_COLORCVT_MAX + COLOR_Luv2BGR,
CX_Luv2LBGRA = CV_COLORCVT_MAX + CV_Luv2LBGR, CX_Luv2LBGRA = COLOR_COLORCVT_MAX + COLOR_Luv2LBGR,
CX_Luv2LRGBA = CV_COLORCVT_MAX + CV_Luv2LRGB, CX_Luv2LRGBA = COLOR_COLORCVT_MAX + COLOR_Luv2LRGB,
CX_Luv2RGBA = CV_COLORCVT_MAX + CV_Luv2RGB, CX_Luv2RGBA = COLOR_COLORCVT_MAX + COLOR_Luv2RGB,
CX_RGBA2HLS = CV_COLORCVT_MAX + CV_RGB2HLS, CX_RGBA2HLS = COLOR_COLORCVT_MAX + COLOR_RGB2HLS,
CX_RGBA2HLS_FULL = CV_COLORCVT_MAX + CV_RGB2HLS_FULL, CX_RGBA2HLS_FULL = COLOR_COLORCVT_MAX + COLOR_RGB2HLS_FULL,
CX_RGBA2HSV = CV_COLORCVT_MAX + CV_RGB2HSV, CX_RGBA2HSV = COLOR_COLORCVT_MAX + COLOR_RGB2HSV,
CX_RGBA2HSV_FULL = CV_COLORCVT_MAX + CV_RGB2HSV_FULL, CX_RGBA2HSV_FULL = COLOR_COLORCVT_MAX + COLOR_RGB2HSV_FULL,
CX_RGBA2Lab = CV_COLORCVT_MAX + CV_RGB2Lab, CX_RGBA2Lab = COLOR_COLORCVT_MAX + COLOR_RGB2Lab,
CX_RGBA2Luv = CV_COLORCVT_MAX + CV_RGB2Luv, CX_RGBA2Luv = COLOR_COLORCVT_MAX + COLOR_RGB2Luv,
CX_RGBA2XYZ = CV_COLORCVT_MAX + CV_RGB2XYZ, CX_RGBA2XYZ = COLOR_COLORCVT_MAX + COLOR_RGB2XYZ,
CX_RGBA2YCrCb = CV_COLORCVT_MAX + CV_RGB2YCrCb, CX_RGBA2YCrCb = COLOR_COLORCVT_MAX + COLOR_RGB2YCrCb,
CX_RGBA2YUV = CV_COLORCVT_MAX + CV_RGB2YUV, CX_RGBA2YUV = COLOR_COLORCVT_MAX + COLOR_RGB2YUV,
CX_XYZ2BGRA = CV_COLORCVT_MAX + CV_XYZ2BGR, CX_XYZ2BGRA = COLOR_COLORCVT_MAX + COLOR_XYZ2BGR,
CX_XYZ2RGBA = CV_COLORCVT_MAX + CV_XYZ2RGB, CX_XYZ2RGBA = COLOR_COLORCVT_MAX + COLOR_XYZ2RGB,
CX_YCrCb2BGRA = CV_COLORCVT_MAX + CV_YCrCb2BGR, CX_YCrCb2BGRA = COLOR_COLORCVT_MAX + COLOR_YCrCb2BGR,
CX_YCrCb2RGBA = CV_COLORCVT_MAX + CV_YCrCb2RGB, CX_YCrCb2RGBA = COLOR_COLORCVT_MAX + COLOR_YCrCb2RGB,
CX_YUV2BGRA = CV_COLORCVT_MAX + CV_YUV2BGR, CX_YUV2BGRA = COLOR_COLORCVT_MAX + COLOR_YUV2BGR,
CX_YUV2RGBA = CV_COLORCVT_MAX + CV_YUV2RGB CX_YUV2RGBA = COLOR_COLORCVT_MAX + COLOR_YUV2RGB
}; };
CV_ENUM(CvtMode, CV_ENUM(CvtMode,
CV_BGR2BGR555, CV_BGR2BGR565, CV_BGR2BGRA, CV_BGR2GRAY, COLOR_BGR2BGR555, COLOR_BGR2BGR565, COLOR_BGR2BGRA, COLOR_BGR2GRAY,
CV_BGR2HLS, CV_BGR2HLS_FULL, CV_BGR2HSV, CV_BGR2HSV_FULL, COLOR_BGR2HLS, COLOR_BGR2HLS_FULL, COLOR_BGR2HSV, COLOR_BGR2HSV_FULL,
CV_BGR2Lab, CV_BGR2Luv, CV_BGR2RGB, CV_BGR2RGBA, CV_BGR2XYZ, COLOR_BGR2Lab, COLOR_BGR2Luv, COLOR_BGR2RGB, COLOR_BGR2RGBA, COLOR_BGR2XYZ,
CV_BGR2YCrCb, CV_BGR2YUV, CV_BGR5552BGR, CV_BGR5552BGRA, COLOR_BGR2YCrCb, COLOR_BGR2YUV, COLOR_BGR5552BGR, COLOR_BGR5552BGRA,
CV_BGR5552GRAY, CV_BGR5552RGB, CV_BGR5552RGBA, CV_BGR5652BGR, COLOR_BGR5552GRAY, COLOR_BGR5552RGB, COLOR_BGR5552RGBA, COLOR_BGR5652BGR,
CV_BGR5652BGRA, CV_BGR5652GRAY, CV_BGR5652RGB, CV_BGR5652RGBA, COLOR_BGR5652BGRA, COLOR_BGR5652GRAY, COLOR_BGR5652RGB, COLOR_BGR5652RGBA,
CV_BGRA2BGR, CV_BGRA2BGR555, CV_BGRA2BGR565, CV_BGRA2GRAY, CV_BGRA2RGBA, COLOR_BGRA2BGR, COLOR_BGRA2BGR555, COLOR_BGRA2BGR565, COLOR_BGRA2GRAY, COLOR_BGRA2RGBA,
CX_BGRA2HLS, CX_BGRA2HLS_FULL, CX_BGRA2HSV, CX_BGRA2HSV_FULL, CX_BGRA2HLS, CX_BGRA2HLS_FULL, CX_BGRA2HSV, CX_BGRA2HSV_FULL,
CX_BGRA2Lab, CX_BGRA2Luv, CX_BGRA2XYZ, CX_BGRA2Lab, CX_BGRA2Luv, CX_BGRA2XYZ,
CX_BGRA2YCrCb, CX_BGRA2YUV, CX_BGRA2YCrCb, CX_BGRA2YUV,
CV_GRAY2BGR, CV_GRAY2BGR555, CV_GRAY2BGR565, CV_GRAY2BGRA, COLOR_GRAY2BGR, COLOR_GRAY2BGR555, COLOR_GRAY2BGR565, COLOR_GRAY2BGRA,
CV_HLS2BGR, CV_HLS2BGR_FULL, CV_HLS2RGB, CV_HLS2RGB_FULL, COLOR_HLS2BGR, COLOR_HLS2BGR_FULL, COLOR_HLS2RGB, COLOR_HLS2RGB_FULL,
CX_HLS2BGRA, CX_HLS2BGRA_FULL, CX_HLS2RGBA, CX_HLS2RGBA_FULL, CX_HLS2BGRA, CX_HLS2BGRA_FULL, CX_HLS2RGBA, CX_HLS2RGBA_FULL,
CV_HSV2BGR, CV_HSV2BGR_FULL, CV_HSV2RGB, CV_HSV2RGB_FULL, COLOR_HSV2BGR, COLOR_HSV2BGR_FULL, COLOR_HSV2RGB, COLOR_HSV2RGB_FULL,
CX_HSV2BGRA, CX_HSV2BGRA_FULL, CX_HSV2RGBA, CX_HSV2RGBA_FULL, CX_HSV2BGRA, CX_HSV2BGRA_FULL, CX_HSV2RGBA, CX_HSV2RGBA_FULL,
CV_Lab2BGR, CV_Lab2LBGR, CV_Lab2LRGB, CV_Lab2RGB, COLOR_Lab2BGR, COLOR_Lab2LBGR, COLOR_Lab2LRGB, COLOR_Lab2RGB,
CX_Lab2BGRA, CX_Lab2LBGRA, CX_Lab2LRGBA, CX_Lab2RGBA, CX_Lab2BGRA, CX_Lab2LBGRA, CX_Lab2LRGBA, CX_Lab2RGBA,
CV_LBGR2Lab, CV_LBGR2Luv, CV_LRGB2Lab, CV_LRGB2Luv, COLOR_LBGR2Lab, COLOR_LBGR2Luv, COLOR_LRGB2Lab, COLOR_LRGB2Luv,
CX_LBGRA2Lab, CX_LBGRA2Luv, CX_LRGBA2Lab, CX_LRGBA2Luv, CX_LBGRA2Lab, CX_LBGRA2Luv, CX_LRGBA2Lab, CX_LRGBA2Luv,
CV_Luv2BGR, CV_Luv2LBGR, CV_Luv2LRGB, CV_Luv2RGB, COLOR_Luv2BGR, COLOR_Luv2LBGR, COLOR_Luv2LRGB, COLOR_Luv2RGB,
CX_Luv2BGRA, CX_Luv2LBGRA, CX_Luv2LRGBA, CX_Luv2RGBA, CX_Luv2BGRA, CX_Luv2LBGRA, CX_Luv2LRGBA, CX_Luv2RGBA,
CV_RGB2BGR555, CV_RGB2BGR565, CV_RGB2GRAY, COLOR_RGB2BGR555, COLOR_RGB2BGR565, COLOR_RGB2GRAY,
CV_RGB2HLS, CV_RGB2HLS_FULL, CV_RGB2HSV, CV_RGB2HSV_FULL, COLOR_RGB2HLS, COLOR_RGB2HLS_FULL, COLOR_RGB2HSV, COLOR_RGB2HSV_FULL,
CV_RGB2Lab, CV_RGB2Luv, CV_RGB2XYZ, CV_RGB2YCrCb, CV_RGB2YUV, COLOR_RGB2Lab, COLOR_RGB2Luv, COLOR_RGB2XYZ, COLOR_RGB2YCrCb, COLOR_RGB2YUV,
CV_RGBA2BGR, CV_RGBA2BGR555, CV_RGBA2BGR565, CV_RGBA2GRAY, COLOR_RGBA2BGR, COLOR_RGBA2BGR555, COLOR_RGBA2BGR565, COLOR_RGBA2GRAY,
CX_RGBA2HLS, CX_RGBA2HLS_FULL, CX_RGBA2HSV, CX_RGBA2HSV_FULL, CX_RGBA2HLS, CX_RGBA2HLS_FULL, CX_RGBA2HSV, CX_RGBA2HSV_FULL,
CX_RGBA2Lab, CX_RGBA2Luv, CX_RGBA2XYZ, CX_RGBA2Lab, CX_RGBA2Luv, CX_RGBA2XYZ,
CX_RGBA2YCrCb, CX_RGBA2YUV, CX_RGBA2YCrCb, CX_RGBA2YUV,
CV_XYZ2BGR, CV_XYZ2RGB, CX_XYZ2BGRA, CX_XYZ2RGBA, COLOR_XYZ2BGR, COLOR_XYZ2RGB, CX_XYZ2BGRA, CX_XYZ2RGBA,
CV_YCrCb2BGR, CV_YCrCb2RGB, CX_YCrCb2BGRA, CX_YCrCb2RGBA, COLOR_YCrCb2BGR, COLOR_YCrCb2RGB, CX_YCrCb2BGRA, CX_YCrCb2RGBA,
CV_YUV2BGR, CV_YUV2RGB, CX_YUV2BGRA, CX_YUV2RGBA COLOR_YUV2BGR, COLOR_YUV2RGB, CX_YUV2BGRA, CX_YUV2RGBA
) )
CV_ENUM(CvtModeBayer, CV_ENUM(CvtModeBayer,
CV_BayerBG2BGR, CV_BayerBG2BGR_VNG, CV_BayerBG2GRAY, COLOR_BayerBG2BGR, COLOR_BayerBG2BGR_VNG, COLOR_BayerBG2GRAY,
CV_BayerGB2BGR, CV_BayerGB2BGR_VNG, CV_BayerGB2GRAY, COLOR_BayerGB2BGR, COLOR_BayerGB2BGR_VNG, COLOR_BayerGB2GRAY,
CV_BayerGR2BGR, CV_BayerGR2BGR_VNG, CV_BayerGR2GRAY, COLOR_BayerGR2BGR, COLOR_BayerGR2BGR_VNG, COLOR_BayerGR2GRAY,
CV_BayerRG2BGR, CV_BayerRG2BGR_VNG, CV_BayerRG2GRAY COLOR_BayerRG2BGR, COLOR_BayerRG2BGR_VNG, COLOR_BayerRG2GRAY
) )
CV_ENUM(CvtMode2, CV_YUV2BGR_NV12, CV_YUV2BGRA_NV12, CV_YUV2RGB_NV12, CV_YUV2RGBA_NV12, CV_YUV2BGR_NV21, CV_YUV2BGRA_NV21, CV_YUV2RGB_NV21, CV_YUV2RGBA_NV21, CV_ENUM(CvtMode2, COLOR_YUV2BGR_NV12, COLOR_YUV2BGRA_NV12, COLOR_YUV2RGB_NV12, COLOR_YUV2RGBA_NV12, COLOR_YUV2BGR_NV21, COLOR_YUV2BGRA_NV21, COLOR_YUV2RGB_NV21, COLOR_YUV2RGBA_NV21,
CV_YUV2BGR_YV12, CV_YUV2BGRA_YV12, CV_YUV2RGB_YV12, CV_YUV2RGBA_YV12, CV_YUV2BGR_IYUV, CV_YUV2BGRA_IYUV, CV_YUV2RGB_IYUV, CV_YUV2RGBA_IYUV, COLOR_YUV2BGR_YV12, COLOR_YUV2BGRA_YV12, COLOR_YUV2RGB_YV12, COLOR_YUV2RGBA_YV12, COLOR_YUV2BGR_IYUV, COLOR_YUV2BGRA_IYUV, COLOR_YUV2RGB_IYUV, COLOR_YUV2RGBA_IYUV,
COLOR_YUV2GRAY_420, CV_YUV2RGB_UYVY, CV_YUV2BGR_UYVY, CV_YUV2RGBA_UYVY, CV_YUV2BGRA_UYVY, CV_YUV2RGB_YUY2, CV_YUV2BGR_YUY2, CV_YUV2RGB_YVYU, COLOR_YUV2GRAY_420, COLOR_YUV2RGB_UYVY, COLOR_YUV2BGR_UYVY, COLOR_YUV2RGBA_UYVY, COLOR_YUV2BGRA_UYVY, COLOR_YUV2RGB_YUY2, COLOR_YUV2BGR_YUY2, COLOR_YUV2RGB_YVYU,
CV_YUV2BGR_YVYU, CV_YUV2RGBA_YUY2, CV_YUV2BGRA_YUY2, CV_YUV2RGBA_YVYU, CV_YUV2BGRA_YVYU) COLOR_YUV2BGR_YVYU, COLOR_YUV2RGBA_YUY2, COLOR_YUV2BGRA_YUY2, COLOR_YUV2RGBA_YVYU, COLOR_YUV2BGRA_YVYU)
CV_ENUM(CvtMode3, CV_RGB2YUV_IYUV, CV_BGR2YUV_IYUV, CV_RGBA2YUV_IYUV, CV_BGRA2YUV_IYUV, CV_ENUM(CvtMode3, COLOR_RGB2YUV_IYUV, COLOR_BGR2YUV_IYUV, COLOR_RGBA2YUV_IYUV, COLOR_BGRA2YUV_IYUV,
CV_RGB2YUV_YV12, CV_BGR2YUV_YV12, CV_RGBA2YUV_YV12, CV_BGRA2YUV_YV12) COLOR_RGB2YUV_YV12, COLOR_BGR2YUV_YV12, COLOR_RGBA2YUV_YV12, COLOR_BGRA2YUV_YV12)
struct ChPair struct ChPair
{ {
@ -128,74 +128,74 @@ ChPair getConversionInfo(int cvtMode)
{ {
switch(cvtMode) switch(cvtMode)
{ {
case CV_BayerBG2GRAY: case CV_BayerGB2GRAY: case COLOR_BayerBG2GRAY: case COLOR_BayerGB2GRAY:
case CV_BayerGR2GRAY: case CV_BayerRG2GRAY: case COLOR_BayerGR2GRAY: case COLOR_BayerRG2GRAY:
case CV_YUV2GRAY_420: case COLOR_YUV2GRAY_420:
return ChPair(1,1); return ChPair(1,1);
case CV_GRAY2BGR555: case CV_GRAY2BGR565: case COLOR_GRAY2BGR555: case COLOR_GRAY2BGR565:
return ChPair(1,2); return ChPair(1,2);
case CV_BayerBG2BGR: case CV_BayerBG2BGR_VNG: case COLOR_BayerBG2BGR: case COLOR_BayerBG2BGR_VNG:
case CV_BayerGB2BGR: case CV_BayerGB2BGR_VNG: case COLOR_BayerGB2BGR: case COLOR_BayerGB2BGR_VNG:
case CV_BayerGR2BGR: case CV_BayerGR2BGR_VNG: case COLOR_BayerGR2BGR: case COLOR_BayerGR2BGR_VNG:
case CV_BayerRG2BGR: case CV_BayerRG2BGR_VNG: case COLOR_BayerRG2BGR: case COLOR_BayerRG2BGR_VNG:
case CV_GRAY2BGR: case COLOR_GRAY2BGR:
case CV_YUV2BGR_NV12: case CV_YUV2RGB_NV12: case COLOR_YUV2BGR_NV12: case COLOR_YUV2RGB_NV12:
case CV_YUV2BGR_NV21: case CV_YUV2RGB_NV21: case COLOR_YUV2BGR_NV21: case COLOR_YUV2RGB_NV21:
case CV_YUV2BGR_YV12: case CV_YUV2RGB_YV12: case COLOR_YUV2BGR_YV12: case COLOR_YUV2RGB_YV12:
case CV_YUV2BGR_IYUV: case CV_YUV2RGB_IYUV: case COLOR_YUV2BGR_IYUV: case COLOR_YUV2RGB_IYUV:
return ChPair(1,3); return ChPair(1,3);
case CV_GRAY2BGRA: case COLOR_GRAY2BGRA:
case CV_YUV2BGRA_NV12: case CV_YUV2RGBA_NV12: case COLOR_YUV2BGRA_NV12: case COLOR_YUV2RGBA_NV12:
case CV_YUV2BGRA_NV21: case CV_YUV2RGBA_NV21: case COLOR_YUV2BGRA_NV21: case COLOR_YUV2RGBA_NV21:
case CV_YUV2BGRA_YV12: case CV_YUV2RGBA_YV12: case COLOR_YUV2BGRA_YV12: case COLOR_YUV2RGBA_YV12:
case CV_YUV2BGRA_IYUV: case CV_YUV2RGBA_IYUV: case COLOR_YUV2BGRA_IYUV: case COLOR_YUV2RGBA_IYUV:
return ChPair(1,4); return ChPair(1,4);
case CV_BGR5552GRAY: case CV_BGR5652GRAY: case COLOR_BGR5552GRAY: case COLOR_BGR5652GRAY:
return ChPair(2,1); return ChPair(2,1);
case CV_BGR5552BGR: case CV_BGR5552RGB: case COLOR_BGR5552BGR: case COLOR_BGR5552RGB:
case CV_BGR5652BGR: case CV_BGR5652RGB: case COLOR_BGR5652BGR: case COLOR_BGR5652RGB:
case CV_YUV2RGB_UYVY: case CV_YUV2BGR_UYVY: case COLOR_YUV2RGB_UYVY: case COLOR_YUV2BGR_UYVY:
case CV_YUV2RGBA_UYVY: case CV_YUV2BGRA_UYVY: case COLOR_YUV2RGBA_UYVY: case COLOR_YUV2BGRA_UYVY:
case CV_YUV2RGB_YUY2: case CV_YUV2BGR_YUY2: case COLOR_YUV2RGB_YUY2: case COLOR_YUV2BGR_YUY2:
case CV_YUV2RGB_YVYU: case CV_YUV2BGR_YVYU: case COLOR_YUV2RGB_YVYU: case COLOR_YUV2BGR_YVYU:
case CV_YUV2RGBA_YUY2: case CV_YUV2BGRA_YUY2: case COLOR_YUV2RGBA_YUY2: case COLOR_YUV2BGRA_YUY2:
case CV_YUV2RGBA_YVYU: case CV_YUV2BGRA_YVYU: case COLOR_YUV2RGBA_YVYU: case COLOR_YUV2BGRA_YVYU:
return ChPair(2,3); return ChPair(2,3);
case CV_BGR5552BGRA: case CV_BGR5552RGBA: case COLOR_BGR5552BGRA: case COLOR_BGR5552RGBA:
case CV_BGR5652BGRA: case CV_BGR5652RGBA: case COLOR_BGR5652BGRA: case COLOR_BGR5652RGBA:
return ChPair(2,4); return ChPair(2,4);
case CV_BGR2GRAY: case CV_RGB2GRAY: case COLOR_BGR2GRAY: case COLOR_RGB2GRAY:
case CV_RGB2YUV_IYUV: case CV_RGB2YUV_YV12: case COLOR_RGB2YUV_IYUV: case COLOR_RGB2YUV_YV12:
case CV_BGR2YUV_IYUV: case CV_BGR2YUV_YV12: case COLOR_BGR2YUV_IYUV: case COLOR_BGR2YUV_YV12:
return ChPair(3,1); return ChPair(3,1);
case CV_BGR2BGR555: case CV_BGR2BGR565: case COLOR_BGR2BGR555: case COLOR_BGR2BGR565:
case CV_RGB2BGR555: case CV_RGB2BGR565: case COLOR_RGB2BGR555: case COLOR_RGB2BGR565:
return ChPair(3,2); return ChPair(3,2);
case CV_BGR2HLS: case CV_BGR2HLS_FULL: case COLOR_BGR2HLS: case COLOR_BGR2HLS_FULL:
case CV_BGR2HSV: case CV_BGR2HSV_FULL: case COLOR_BGR2HSV: case COLOR_BGR2HSV_FULL:
case CV_BGR2Lab: case CV_BGR2Luv: case COLOR_BGR2Lab: case COLOR_BGR2Luv:
case CV_BGR2RGB: case CV_BGR2XYZ: case COLOR_BGR2RGB: case COLOR_BGR2XYZ:
case CV_BGR2YCrCb: case CV_BGR2YUV: case COLOR_BGR2YCrCb: case COLOR_BGR2YUV:
case CV_HLS2BGR: case CV_HLS2BGR_FULL: case COLOR_HLS2BGR: case COLOR_HLS2BGR_FULL:
case CV_HLS2RGB: case CV_HLS2RGB_FULL: case COLOR_HLS2RGB: case COLOR_HLS2RGB_FULL:
case CV_HSV2BGR: case CV_HSV2BGR_FULL: case COLOR_HSV2BGR: case COLOR_HSV2BGR_FULL:
case CV_HSV2RGB: case CV_HSV2RGB_FULL: case COLOR_HSV2RGB: case COLOR_HSV2RGB_FULL:
case CV_Lab2BGR: case CV_Lab2LBGR: case COLOR_Lab2BGR: case COLOR_Lab2LBGR:
case CV_Lab2LRGB: case CV_Lab2RGB: case COLOR_Lab2LRGB: case COLOR_Lab2RGB:
case CV_LBGR2Lab: case CV_LBGR2Luv: case COLOR_LBGR2Lab: case COLOR_LBGR2Luv:
case CV_LRGB2Lab: case CV_LRGB2Luv: case COLOR_LRGB2Lab: case COLOR_LRGB2Luv:
case CV_Luv2BGR: case CV_Luv2LBGR: case COLOR_Luv2BGR: case COLOR_Luv2LBGR:
case CV_Luv2LRGB: case CV_Luv2RGB: case COLOR_Luv2LRGB: case COLOR_Luv2RGB:
case CV_RGB2HLS: case CV_RGB2HLS_FULL: case COLOR_RGB2HLS: case COLOR_RGB2HLS_FULL:
case CV_RGB2HSV: case CV_RGB2HSV_FULL: case COLOR_RGB2HSV: case COLOR_RGB2HSV_FULL:
case CV_RGB2Lab: case CV_RGB2Luv: case COLOR_RGB2Lab: case COLOR_RGB2Luv:
case CV_RGB2XYZ: case CV_RGB2YCrCb: case COLOR_RGB2XYZ: case COLOR_RGB2YCrCb:
case CV_RGB2YUV: case CV_XYZ2BGR: case COLOR_RGB2YUV: case COLOR_XYZ2BGR:
case CV_XYZ2RGB: case CV_YCrCb2BGR: case COLOR_XYZ2RGB: case COLOR_YCrCb2BGR:
case CV_YCrCb2RGB: case CV_YUV2BGR: case COLOR_YCrCb2RGB: case COLOR_YUV2BGR:
case CV_YUV2RGB: case COLOR_YUV2RGB:
return ChPair(3,3); return ChPair(3,3);
case CV_BGR2BGRA: case CV_BGR2RGBA: case COLOR_BGR2BGRA: case COLOR_BGR2RGBA:
case CX_HLS2BGRA: case CX_HLS2BGRA_FULL: case CX_HLS2BGRA: case CX_HLS2BGRA_FULL:
case CX_HLS2RGBA: case CX_HLS2RGBA_FULL: case CX_HLS2RGBA: case CX_HLS2RGBA_FULL:
case CX_HSV2BGRA: case CX_HSV2BGRA_FULL: case CX_HSV2BGRA: case CX_HSV2BGRA_FULL:
@ -208,27 +208,27 @@ ChPair getConversionInfo(int cvtMode)
case CX_YCrCb2BGRA: case CX_YCrCb2RGBA: case CX_YCrCb2BGRA: case CX_YCrCb2RGBA:
case CX_YUV2BGRA: case CX_YUV2RGBA: case CX_YUV2BGRA: case CX_YUV2RGBA:
return ChPair(3,4); return ChPair(3,4);
case CV_BGRA2GRAY: case CV_RGBA2GRAY: case COLOR_BGRA2GRAY: case COLOR_RGBA2GRAY:
case CV_RGBA2YUV_IYUV: case CV_RGBA2YUV_YV12: case COLOR_RGBA2YUV_IYUV: case COLOR_RGBA2YUV_YV12:
case CV_BGRA2YUV_IYUV: case CV_BGRA2YUV_YV12: case COLOR_BGRA2YUV_IYUV: case COLOR_BGRA2YUV_YV12:
return ChPair(4,1); return ChPair(4,1);
case CV_BGRA2BGR555: case CV_BGRA2BGR565: case COLOR_BGRA2BGR555: case COLOR_BGRA2BGR565:
case CV_RGBA2BGR555: case CV_RGBA2BGR565: case COLOR_RGBA2BGR555: case COLOR_RGBA2BGR565:
return ChPair(4,2); return ChPair(4,2);
case CV_BGRA2BGR: case CX_BGRA2HLS: case COLOR_BGRA2BGR: case CX_BGRA2HLS:
case CX_BGRA2HLS_FULL: case CX_BGRA2HSV: case CX_BGRA2HLS_FULL: case CX_BGRA2HSV:
case CX_BGRA2HSV_FULL: case CX_BGRA2Lab: case CX_BGRA2HSV_FULL: case CX_BGRA2Lab:
case CX_BGRA2Luv: case CX_BGRA2XYZ: case CX_BGRA2Luv: case CX_BGRA2XYZ:
case CX_BGRA2YCrCb: case CX_BGRA2YUV: case CX_BGRA2YCrCb: case CX_BGRA2YUV:
case CX_LBGRA2Lab: case CX_LBGRA2Luv: case CX_LBGRA2Lab: case CX_LBGRA2Luv:
case CX_LRGBA2Lab: case CX_LRGBA2Luv: case CX_LRGBA2Lab: case CX_LRGBA2Luv:
case CV_RGBA2BGR: case CX_RGBA2HLS: case COLOR_RGBA2BGR: case CX_RGBA2HLS:
case CX_RGBA2HLS_FULL: case CX_RGBA2HSV: case CX_RGBA2HLS_FULL: case CX_RGBA2HSV:
case CX_RGBA2HSV_FULL: case CX_RGBA2Lab: case CX_RGBA2HSV_FULL: case CX_RGBA2Lab:
case CX_RGBA2Luv: case CX_RGBA2XYZ: case CX_RGBA2Luv: case CX_RGBA2XYZ:
case CX_RGBA2YCrCb: case CX_RGBA2YUV: case CX_RGBA2YCrCb: case CX_RGBA2YUV:
return ChPair(4,3); return ChPair(4,3);
case CV_BGRA2RGBA: case COLOR_BGRA2RGBA:
return ChPair(4,4); return ChPair(4,4);
default: default:
ADD_FAILURE() << "Unknown conversion type"; ADD_FAILURE() << "Unknown conversion type";
@ -250,7 +250,7 @@ PERF_TEST_P(Size_CvtMode, cvtColor8u,
Size sz = get<0>(GetParam()); Size sz = get<0>(GetParam());
int mode = get<1>(GetParam()); int mode = get<1>(GetParam());
ChPair ch = getConversionInfo(mode); ChPair ch = getConversionInfo(mode);
mode %= CV_COLORCVT_MAX; mode %= COLOR_COLORCVT_MAX;
Mat src(sz, CV_8UC(ch.scn)); Mat src(sz, CV_8UC(ch.scn));
Mat dst(sz, CV_8UC(ch.dcn)); Mat dst(sz, CV_8UC(ch.dcn));
@ -276,7 +276,7 @@ PERF_TEST_P(Size_CvtMode_Bayer, cvtColorBayer8u,
Size sz = get<0>(GetParam()); Size sz = get<0>(GetParam());
int mode = get<1>(GetParam()); int mode = get<1>(GetParam());
ChPair ch = getConversionInfo(mode); ChPair ch = getConversionInfo(mode);
mode %= CV_COLORCVT_MAX; mode %= COLOR_COLORCVT_MAX;
Mat src(sz, CV_8UC(ch.scn)); Mat src(sz, CV_8UC(ch.scn));
Mat dst(sz, CV_8UC(ch.dcn)); Mat dst(sz, CV_8UC(ch.dcn));

View File

@ -6,7 +6,7 @@ using namespace perf;
using std::tr1::make_tuple; using std::tr1::make_tuple;
using std::tr1::get; using std::tr1::get;
CV_ENUM(MethodType, CV_TM_SQDIFF, CV_TM_SQDIFF_NORMED, CV_TM_CCORR, CV_TM_CCORR_NORMED, CV_TM_CCOEFF, CV_TM_CCOEFF_NORMED) CV_ENUM(MethodType, TM_SQDIFF, TM_SQDIFF_NORMED, TM_CCORR, TM_CCORR_NORMED, TM_CCOEFF, TM_CCOEFF_NORMED)
typedef std::tr1::tuple<Size, Size, MethodType> ImgSize_TmplSize_Method_t; typedef std::tr1::tuple<Size, Size, MethodType> ImgSize_TmplSize_Method_t;
typedef perf::TestBaseWithParam<ImgSize_TmplSize_Method_t> ImgSize_TmplSize_Method; typedef perf::TestBaseWithParam<ImgSize_TmplSize_Method_t> ImgSize_TmplSize_Method;
@ -39,9 +39,9 @@ PERF_TEST_P(ImgSize_TmplSize_Method, matchTemplateSmall,
TEST_CYCLE() matchTemplate(img, tmpl, result, method); TEST_CYCLE() matchTemplate(img, tmpl, result, method);
bool isNormed = bool isNormed =
method == CV_TM_CCORR_NORMED || method == TM_CCORR_NORMED ||
method == CV_TM_SQDIFF_NORMED || method == TM_SQDIFF_NORMED ||
method == CV_TM_CCOEFF_NORMED; method == TM_CCOEFF_NORMED;
double eps = isNormed ? 1e-6 double eps = isNormed ? 1e-6
: 255 * 255 * tmpl.total() * 1e-6; : 255 * 255 * tmpl.total() * 1e-6;
@ -73,9 +73,9 @@ PERF_TEST_P(ImgSize_TmplSize_Method, matchTemplateBig,
TEST_CYCLE() matchTemplate(img, tmpl, result, method); TEST_CYCLE() matchTemplate(img, tmpl, result, method);
bool isNormed = bool isNormed =
method == CV_TM_CCORR_NORMED || method == TM_CCORR_NORMED ||
method == CV_TM_SQDIFF_NORMED || method == TM_SQDIFF_NORMED ||
method == CV_TM_CCOEFF_NORMED; method == TM_CCOEFF_NORMED;
double eps = isNormed ? 1e-6 double eps = isNormed ? 1e-6
: 255 * 255 * tmpl.total() * 1e-6; : 255 * 255 * tmpl.total() * 1e-6;

View File

@ -162,7 +162,7 @@ calcHarris( const Mat& _cov, Mat& _dst, double k )
} }
void eigen2x2( const float* cov, float* dst, int n ) static void eigen2x2( const float* cov, float* dst, int n )
{ {
for( int j = 0; j < n; j++ ) for( int j = 0; j < n; j++ )
{ {

View File

@ -43,10 +43,6 @@
namespace cv namespace cv
{ {
template<> void Ptr<CvHistogram>::delete_obj()
{ cvReleaseHist(&obj); }
////////////////// Helper functions ////////////////////// ////////////////// Helper functions //////////////////////
static const size_t OUT_OF_RANGE = (size_t)1 << (sizeof(size_t)*8 - 2); static const size_t OUT_OF_RANGE = (size_t)1 << (sizeof(size_t)*8 - 2);

View File

@ -354,25 +354,6 @@ Moments::Moments( double _m00, double _m10, double _m01, double _m20, double _m1
nu30 = mu30*s3; nu21 = mu21*s3; nu12 = mu12*s3; nu03 = mu03*s3; nu30 = mu30*s3; nu21 = mu21*s3; nu12 = mu12*s3; nu03 = mu03*s3;
} }
Moments::Moments( const CvMoments& m )
{
*this = Moments(m.m00, m.m10, m.m01, m.m20, m.m11, m.m02, m.m30, m.m21, m.m12, m.m03);
}
Moments::operator CvMoments() const
{
CvMoments m;
m.m00 = m00; m.m10 = m10; m.m01 = m01;
m.m20 = m20; m.m11 = m11; m.m02 = m02;
m.m30 = m30; m.m21 = m21; m.m12 = m12; m.m03 = m03;
m.mu20 = mu20; m.mu11 = mu11; m.mu02 = mu02;
m.mu30 = mu30; m.mu21 = mu21; m.mu12 = mu12; m.mu03 = mu03;
double am00 = std::abs(m00);
m.inv_sqrt_m00 = am00 > DBL_EPSILON ? 1./std::sqrt(am00) : 0;
return m;
}
} }

View File

@ -1191,9 +1191,6 @@ static void morphOp( int op, InputArray _src, OutputArray _dst,
// f->apply( dst, dst ); // f->apply( dst, dst );
} }
template<> void Ptr<IplConvKernel>::delete_obj()
{ cvReleaseStructuringElement(&obj); }
} }
void cv::erode( InputArray src, OutputArray dst, InputArray kernel, void cv::erode( InputArray src, OutputArray dst, InputArray kernel,

View File

@ -1687,8 +1687,8 @@ TEST(Imgproc_ColorBayer, regression)
{ {
cvtest::TS* ts = cvtest::TS::ptr(); cvtest::TS* ts = cvtest::TS::ptr();
Mat given = imread(string(ts->get_data_path()) + "/cvtcolor/bayer_input.png", CV_LOAD_IMAGE_GRAYSCALE); Mat given = imread(string(ts->get_data_path()) + "/cvtcolor/bayer_input.png", IMREAD_GRAYSCALE);
Mat gold = imread(string(ts->get_data_path()) + "/cvtcolor/bayer_gold.png", CV_LOAD_IMAGE_UNCHANGED); Mat gold = imread(string(ts->get_data_path()) + "/cvtcolor/bayer_gold.png", IMREAD_UNCHANGED);
Mat result; Mat result;
CV_Assert(given.data != NULL && gold.data != NULL); CV_Assert(given.data != NULL && gold.data != NULL);
@ -1709,9 +1709,9 @@ TEST(Imgproc_ColorBayerVNG, regression)
{ {
cvtest::TS* ts = cvtest::TS::ptr(); cvtest::TS* ts = cvtest::TS::ptr();
Mat given = imread(string(ts->get_data_path()) + "/cvtcolor/bayer_input.png", CV_LOAD_IMAGE_GRAYSCALE); Mat given = imread(string(ts->get_data_path()) + "/cvtcolor/bayer_input.png", IMREAD_GRAYSCALE);
string goldfname = string(ts->get_data_path()) + "/cvtcolor/bayerVNG_gold.png"; string goldfname = string(ts->get_data_path()) + "/cvtcolor/bayerVNG_gold.png";
Mat gold = imread(goldfname, CV_LOAD_IMAGE_UNCHANGED); Mat gold = imread(goldfname, IMREAD_UNCHANGED);
Mat result; Mat result;
CV_Assert(given.data != NULL); CV_Assert(given.data != NULL);
@ -1804,7 +1804,7 @@ TEST(Imgproc_ColorBayerVNG_Strict, regression)
Mat src, dst, bayer, reference; Mat src, dst, bayer, reference;
std::string full_path = parent_path + image_name; std::string full_path = parent_path + image_name;
src = imread(full_path, CV_LOAD_IMAGE_UNCHANGED); src = imread(full_path, IMREAD_UNCHANGED);
if (src.data == NULL) if (src.data == NULL)
{ {
@ -1824,7 +1824,7 @@ TEST(Imgproc_ColorBayerVNG_Strict, regression)
// reading a reference image // reading a reference image
full_path = parent_path + pattern[i] + image_name; full_path = parent_path + pattern[i] + image_name;
reference = imread(full_path, CV_LOAD_IMAGE_UNCHANGED); reference = imread(full_path, IMREAD_UNCHANGED);
if (reference.data == NULL) if (reference.data == NULL)
{ {
imwrite(full_path, dst); imwrite(full_path, dst);
@ -2091,7 +2091,7 @@ TEST(ImgProc_BayerEdgeAwareDemosaicing, accuracy)
Mat src, bayer; Mat src, bayer;
std::string full_path = parent_path + image_name; std::string full_path = parent_path + image_name;
src = imread(full_path, CV_LOAD_IMAGE_UNCHANGED); src = imread(full_path, IMREAD_UNCHANGED);
if (src.data == NULL) if (src.data == NULL)
{ {
@ -2141,7 +2141,7 @@ TEST(ImgProc_BayerEdgeAwareDemosaicing, accuracy)
TEST(ImgProc_Bayer2RGBA, accuracy) TEST(ImgProc_Bayer2RGBA, accuracy)
{ {
cvtest::TS* ts = cvtest::TS::ptr(); cvtest::TS* ts = cvtest::TS::ptr();
Mat raw = imread(string(ts->get_data_path()) + "/cvtcolor/bayer_input.png", CV_LOAD_IMAGE_GRAYSCALE); Mat raw = imread(string(ts->get_data_path()) + "/cvtcolor/bayer_input.png", IMREAD_GRAYSCALE);
Mat rgb, reference; Mat rgb, reference;
CV_Assert(raw.channels() == 1); CV_Assert(raw.channels() == 1);

View File

@ -143,8 +143,8 @@ TEST(Imgproc_GrabCut, repeatability)
{ {
cvtest::TS& ts = *cvtest::TS::ptr(); cvtest::TS& ts = *cvtest::TS::ptr();
Mat image_1 = imread(string(ts.get_data_path()) + "grabcut/image1652.ppm", CV_LOAD_IMAGE_COLOR); Mat image_1 = imread(string(ts.get_data_path()) + "grabcut/image1652.ppm", IMREAD_COLOR);
Mat mask_1 = imread(string(ts.get_data_path()) + "grabcut/mask1652.ppm", CV_LOAD_IMAGE_GRAYSCALE); Mat mask_1 = imread(string(ts.get_data_path()) + "grabcut/mask1652.ppm", IMREAD_GRAYSCALE);
Rect roi_1(0, 0, 150, 150); Rect roi_1(0, 0, 150, 150);
Mat image_2 = image_1.clone(); Mat image_2 = image_1.clone();

View File

@ -12,8 +12,8 @@
#include <iostream> #include <iostream>
#include "opencv2/ts.hpp" #include "opencv2/ts.hpp"
#include "opencv2/imgproc.hpp" #include "opencv2/imgproc.hpp"
#include "opencv2/imgproc/imgproc_c.h"
#include "opencv2/highgui.hpp" #include "opencv2/highgui.hpp"
#include "opencv2/highgui/highgui_c.h"
#include "opencv2/imgproc/imgproc_c.h"
#endif #endif

View File

@ -4,7 +4,6 @@
#include "opencv2/opencv_modules.hpp" #include "opencv2/opencv_modules.hpp"
#ifdef HAVE_OPENCV_HIGHGUI #ifdef HAVE_OPENCV_HIGHGUI
#include "opencv2/highgui/highgui_c.h"
#include "opencv2/highgui.hpp" #include "opencv2/highgui.hpp"
using namespace cv; using namespace cv;
@ -394,7 +393,7 @@ JNIEXPORT jstring JNICALL Java_org_opencv_highgui_VideoCapture_n_1getSupportedPr
VideoCapture* me = (VideoCapture*) self; //TODO: check for NULL VideoCapture* me = (VideoCapture*) self; //TODO: check for NULL
union {double prop; const char* name;} u; union {double prop; const char* name;} u;
u.prop = me->get(CV_CAP_PROP_SUPPORTED_PREVIEW_SIZES_STRING); u.prop = me->get(CAP_PROP_ANDROID_PREVIEW_SIZES_STRING);
return env->NewStringUTF(u.name); return env->NewStringUTF(u.name);
} catch(cv::Exception e) { } catch(cv::Exception e) {
@ -432,4 +431,4 @@ JNIEXPORT void JNICALL Java_org_opencv_highgui_VideoCapture_n_1delete
} // extern "C" } // extern "C"
#endif // HAVE_OPENCV_HIGHGUI #endif // HAVE_OPENCV_HIGHGUI

View File

@ -44,7 +44,7 @@ JNIEXPORT void JNICALL Java_org_opencv_android_Utils_nBitmapToMat2
// info.format == ANDROID_BITMAP_FORMAT_RGB_565 // info.format == ANDROID_BITMAP_FORMAT_RGB_565
LOGD("nBitmapToMat: RGB_565 -> CV_8UC4"); LOGD("nBitmapToMat: RGB_565 -> CV_8UC4");
Mat tmp(info.height, info.width, CV_8UC2, pixels); Mat tmp(info.height, info.width, CV_8UC2, pixels);
cvtColor(tmp, dst, CV_BGR5652RGBA); cvtColor(tmp, dst, COLOR_BGR5652RGBA);
} }
AndroidBitmap_unlockPixels(env, bitmap); AndroidBitmap_unlockPixels(env, bitmap);
return; return;
@ -104,10 +104,10 @@ JNIEXPORT void JNICALL Java_org_opencv_android_Utils_nMatToBitmap2
if(src.type() == CV_8UC1) if(src.type() == CV_8UC1)
{ {
LOGD("nMatToBitmap: CV_8UC1 -> RGBA_8888"); LOGD("nMatToBitmap: CV_8UC1 -> RGBA_8888");
cvtColor(src, tmp, CV_GRAY2RGBA); cvtColor(src, tmp, COLOR_GRAY2RGBA);
} else if(src.type() == CV_8UC3){ } else if(src.type() == CV_8UC3){
LOGD("nMatToBitmap: CV_8UC3 -> RGBA_8888"); LOGD("nMatToBitmap: CV_8UC3 -> RGBA_8888");
cvtColor(src, tmp, CV_RGB2RGBA); cvtColor(src, tmp, COLOR_RGB2RGBA);
} else if(src.type() == CV_8UC4){ } else if(src.type() == CV_8UC4){
LOGD("nMatToBitmap: CV_8UC4 -> RGBA_8888"); LOGD("nMatToBitmap: CV_8UC4 -> RGBA_8888");
if(needPremultiplyAlpha) cvtColor(src, tmp, COLOR_RGBA2mRGBA); if(needPremultiplyAlpha) cvtColor(src, tmp, COLOR_RGBA2mRGBA);
@ -119,13 +119,13 @@ JNIEXPORT void JNICALL Java_org_opencv_android_Utils_nMatToBitmap2
if(src.type() == CV_8UC1) if(src.type() == CV_8UC1)
{ {
LOGD("nMatToBitmap: CV_8UC1 -> RGB_565"); LOGD("nMatToBitmap: CV_8UC1 -> RGB_565");
cvtColor(src, tmp, CV_GRAY2BGR565); cvtColor(src, tmp, COLOR_GRAY2BGR565);
} else if(src.type() == CV_8UC3){ } else if(src.type() == CV_8UC3){
LOGD("nMatToBitmap: CV_8UC3 -> RGB_565"); LOGD("nMatToBitmap: CV_8UC3 -> RGB_565");
cvtColor(src, tmp, CV_RGB2BGR565); cvtColor(src, tmp, COLOR_RGB2BGR565);
} else if(src.type() == CV_8UC4){ } else if(src.type() == CV_8UC4){
LOGD("nMatToBitmap: CV_8UC4 -> RGB_565"); LOGD("nMatToBitmap: CV_8UC4 -> RGB_565");
cvtColor(src, tmp, CV_RGBA2BGR565); cvtColor(src, tmp, COLOR_RGBA2BGR565);
} }
} }
AndroidBitmap_unlockPixels(env, bitmap); AndroidBitmap_unlockPixels(env, bitmap);

View File

@ -42,7 +42,6 @@
#ifndef __OPENCV_LEGACY_HPP__ #ifndef __OPENCV_LEGACY_HPP__
#define __OPENCV_LEGACY_HPP__ #define __OPENCV_LEGACY_HPP__
#include "opencv2/imgproc.hpp"
#include "opencv2/imgproc/imgproc_c.h" #include "opencv2/imgproc/imgproc_c.h"
#include "opencv2/features2d.hpp" #include "opencv2/features2d.hpp"
#include "opencv2/calib3d.hpp" #include "opencv2/calib3d.hpp"

View File

@ -45,4 +45,4 @@
#error this is a compatibility header which should not be used inside the OpenCV library #error this is a compatibility header which should not be used inside the OpenCV library
#endif #endif
#include "opencv2/legacy.hpp" #include "opencv2/legacy.hpp"

View File

@ -669,7 +669,7 @@ namespace cv{
cvConvertScale(m_samples[i], patch, 255/maxval); cvConvertScale(m_samples[i], patch, 255/maxval);
#ifdef HAVE_OPENCV_HIGHGUI #ifdef HAVE_OPENCV_HIGHGUI
cvSaveImage(buf, patch); cv::imwrite(buf, cv::cvarrToMat(patch));
#else #else
CV_Error(CV_StsNotImplemented, "OpenCV has been compiled without image I/O support"); CV_Error(CV_StsNotImplemented, "OpenCV has been compiled without image I/O support");
#endif #endif
@ -1801,17 +1801,16 @@ namespace cv{
sprintf(filename, "%s/%s", path, imagename); sprintf(filename, "%s/%s", path, imagename);
//printf("Reading image %s...", filename); //printf("Reading image %s...", filename);
IplImage* img = 0; IplImage img;
#ifdef HAVE_OPENCV_HIGHGUI #ifdef HAVE_OPENCV_HIGHGUI
img = cvLoadImage(filename, CV_LOAD_IMAGE_GRAYSCALE); Mat img2 = cv::imread(filename, IMREAD_GRAYSCALE);
img = img2;
#else #else
CV_Error(CV_StsNotImplemented, "OpenCV has been compiled without image I/O support"); CV_Error(CV_StsNotImplemented, "OpenCV has been compiled without image I/O support");
#endif #endif
//printf("done\n"); //printf("done\n");
extractPatches (img, patches, patch_size); extractPatches (&img, patches, patch_size);
cvReleaseImage(&img);
} }
fclose(pFile); fclose(pFile);
} }

View File

@ -732,10 +732,10 @@ void SIFT::operator()(InputArray _image, InputArray _mask,
Mat image = _image.getMat(), mask = _mask.getMat(); Mat image = _image.getMat(), mask = _mask.getMat();
if( image.empty() || image.depth() != CV_8U ) if( image.empty() || image.depth() != CV_8U )
CV_Error( CV_StsBadArg, "image is empty or has incorrect depth (!=CV_8U)" ); CV_Error( Error::StsBadArg, "image is empty or has incorrect depth (!=CV_8U)" );
if( !mask.empty() && mask.type() != CV_8UC1 ) if( !mask.empty() && mask.type() != CV_8UC1 )
CV_Error( CV_StsBadArg, "mask has incorrect type (!=CV_8UC1)" ); CV_Error( Error::StsBadArg, "mask has incorrect type (!=CV_8UC1)" );
if( useProvidedKeypoints ) if( useProvidedKeypoints )
{ {

View File

@ -427,7 +427,7 @@ void SURFFindInvoker::findMaximaInLayer( const Mat& sum, const Mat& mask_sum,
float center_j = sum_j + (size-1)*0.5f; float center_j = sum_j + (size-1)*0.5f;
KeyPoint kpt( center_j, center_i, (float)sizes[layer], KeyPoint kpt( center_j, center_i, (float)sizes[layer],
-1, val0, octave, CV_SIGN(trace_ptr[j]) ); -1, val0, octave, (trace_ptr[j] > 0) - (trace_ptr[j] < 0) );
/* Interpolate maxima location within the 3x3x3 neighbourhood */ /* Interpolate maxima location within the 3x3x3 neighbourhood */
int ds = size - sizes[layer-1]; int ds = size - sizes[layer-1];
@ -550,7 +550,7 @@ struct SURFInvoker
{ {
if( i*i + j*j <= ORI_RADIUS*ORI_RADIUS ) if( i*i + j*j <= ORI_RADIUS*ORI_RADIUS )
{ {
apt[nOriSamples] = cvPoint(i,j); apt[nOriSamples] = Point(i,j);
aptw[nOriSamples++] = G_ori.at<float>(i+ORI_RADIUS,0) * G_ori.at<float>(j+ORI_RADIUS,0); aptw[nOriSamples++] = G_ori.at<float>(i+ORI_RADIUS,0) * G_ori.at<float>(j+ORI_RADIUS,0);
} }
} }
@ -580,9 +580,6 @@ struct SURFInvoker
float X[nOriSampleBound], Y[nOriSampleBound], angle[nOriSampleBound]; float X[nOriSampleBound], Y[nOriSampleBound], angle[nOriSampleBound];
uchar PATCH[PATCH_SZ+1][PATCH_SZ+1]; uchar PATCH[PATCH_SZ+1][PATCH_SZ+1];
float DX[PATCH_SZ][PATCH_SZ], DY[PATCH_SZ][PATCH_SZ]; float DX[PATCH_SZ][PATCH_SZ], DY[PATCH_SZ][PATCH_SZ];
CvMat matX = cvMat(1, nOriSampleBound, CV_32F, X);
CvMat matY = cvMat(1, nOriSampleBound, CV_32F, Y);
CvMat _angle = cvMat(1, nOriSampleBound, CV_32F, angle);
Mat _patch(PATCH_SZ+1, PATCH_SZ+1, CV_8U, PATCH); Mat _patch(PATCH_SZ+1, PATCH_SZ+1, CV_8U, PATCH);
int dsize = extended ? 128 : 64; int dsize = extended ? 128 : 64;
@ -594,7 +591,8 @@ struct SURFInvoker
maxSize = std::max(maxSize, (*keypoints)[k].size); maxSize = std::max(maxSize, (*keypoints)[k].size);
} }
int imaxSize = std::max(cvCeil((PATCH_SZ+1)*maxSize*1.2f/9.0f), 1); int imaxSize = std::max(cvCeil((PATCH_SZ+1)*maxSize*1.2f/9.0f), 1);
Ptr<CvMat> winbuf = cvCreateMat( 1, imaxSize*imaxSize, CV_8U ); cv::AutoBuffer<uchar> winbuf(imaxSize*imaxSize);
for( k = k1; k < k2; k++ ) for( k = k1; k < k2; k++ )
{ {
int i, j, kk, nangle; int i, j, kk, nangle;
@ -648,8 +646,8 @@ struct SURFInvoker
kp.size = -1; kp.size = -1;
continue; continue;
} }
matX.cols = matY.cols = _angle.cols = nangle;
cvCartToPolar( &matX, &matY, 0, &_angle, 1 ); phase( Mat(1, nangle, CV_32F, X), Mat(1, nangle, CV_32F, Y), Mat(1, nangle, CV_32F, angle), true );
float bestx = 0, besty = 0, descriptor_mod = 0; float bestx = 0, besty = 0, descriptor_mod = 0;
for( i = 0; i < 360; i += SURF_ORI_SEARCH_INC ) for( i = 0; i < 360; i += SURF_ORI_SEARCH_INC )
@ -680,8 +678,8 @@ struct SURFInvoker
/* Extract a window of pixels around the keypoint of size 20s */ /* Extract a window of pixels around the keypoint of size 20s */
int win_size = (int)((PATCH_SZ+1)*s); int win_size = (int)((PATCH_SZ+1)*s);
CV_Assert( winbuf->cols >= win_size*win_size ); CV_Assert( imaxSize >= win_size );
Mat win(win_size, win_size, CV_8U, winbuf->data.ptr); Mat win(win_size, win_size, CV_8U, winbuf);
if( !upright ) if( !upright )
{ {

View File

@ -113,10 +113,10 @@ void showOrig(const Mat& img, const vector<KeyPoint>& orig_pts)
{ {
Mat img_color; Mat img_color;
cvtColor(img, img_color, CV_GRAY2BGR); cvtColor(img, img_color, COLOR_GRAY2BGR);
for(size_t i = 0; i < orig_pts.size(); ++i) for(size_t i = 0; i < orig_pts.size(); ++i)
circle(img_color, orig_pts[i].pt, (int)orig_pts[i].size/2, CV_RGB(0, 255, 0)); circle(img_color, orig_pts[i].pt, (int)orig_pts[i].size/2, Scalar(0, 255, 0));
namedWindow("O"); imshow("O", img_color); namedWindow("O"); imshow("O", img_color);
} }
@ -125,13 +125,13 @@ void show(const string& name, const Mat& new_img, const vector<KeyPoint>& new_pt
{ {
Mat new_img_color; Mat new_img_color;
cvtColor(new_img, new_img_color, CV_GRAY2BGR); cvtColor(new_img, new_img_color, COLOR_GRAY2BGR);
for(size_t i = 0; i < transf_pts.size(); ++i) for(size_t i = 0; i < transf_pts.size(); ++i)
circle(new_img_color, transf_pts[i].pt, (int)transf_pts[i].size/2, CV_RGB(255, 0, 0)); circle(new_img_color, transf_pts[i].pt, (int)transf_pts[i].size/2, Scalar(255, 0, 0));
for(size_t i = 0; i < new_pts.size(); ++i) for(size_t i = 0; i < new_pts.size(); ++i)
circle(new_img_color, new_pts[i].pt, (int)new_pts[i].size/2, CV_RGB(0, 0, 255)); circle(new_img_color, new_pts[i].pt, (int)new_pts[i].size/2, Scalar(0, 0, 255));
namedWindow(name + "_T"); imshow(name + "_T", new_img_color); namedWindow(name + "_T"); imshow(name + "_T", new_img_color);
} }

View File

@ -232,7 +232,7 @@ protected:
featureDetector->detect(image0, keypoints0); featureDetector->detect(image0, keypoints0);
removeVerySmallKeypoints(keypoints0); removeVerySmallKeypoints(keypoints0);
if(keypoints0.size() < 15) if(keypoints0.size() < 15)
CV_Error(CV_StsAssert, "Detector gives too few points in a test image\n"); CV_Error(Error::StsAssert, "Detector gives too few points in a test image\n");
const int maxAngle = 360, angleStep = 15; const int maxAngle = 360, angleStep = 15;
for(int angle = 0; angle < maxAngle; angle += angleStep) for(int angle = 0; angle < maxAngle; angle += angleStep)
@ -262,7 +262,7 @@ protected:
float angle0 = keypoints0[matches[m].queryIdx].angle; float angle0 = keypoints0[matches[m].queryIdx].angle;
float angle1 = keypoints1[matches[m].trainIdx].angle; float angle1 = keypoints1[matches[m].trainIdx].angle;
if(angle0 == -1 || angle1 == -1) if(angle0 == -1 || angle1 == -1)
CV_Error(CV_StsBadArg, "Given FeatureDetector is not rotation invariant, it can not be tested here.\n"); CV_Error(Error::StsBadArg, "Given FeatureDetector is not rotation invariant, it can not be tested here.\n");
CV_Assert(angle0 >= 0.f && angle0 < 360.f); CV_Assert(angle0 >= 0.f && angle0 < 360.f);
CV_Assert(angle1 >= 0.f && angle1 < 360.f); CV_Assert(angle1 >= 0.f && angle1 < 360.f);
@ -347,7 +347,7 @@ protected:
featureDetector->detect(image0, keypoints0); featureDetector->detect(image0, keypoints0);
removeVerySmallKeypoints(keypoints0); removeVerySmallKeypoints(keypoints0);
if(keypoints0.size() < 15) if(keypoints0.size() < 15)
CV_Error(CV_StsAssert, "Detector gives too few points in a test image\n"); CV_Error(Error::StsAssert, "Detector gives too few points in a test image\n");
descriptorExtractor->compute(image0, keypoints0, descriptors0); descriptorExtractor->compute(image0, keypoints0, descriptors0);
BFMatcher bfmatcher(normType); BFMatcher bfmatcher(normType);
@ -432,7 +432,7 @@ protected:
featureDetector->detect(image0, keypoints0); featureDetector->detect(image0, keypoints0);
removeVerySmallKeypoints(keypoints0); removeVerySmallKeypoints(keypoints0);
if(keypoints0.size() < 15) if(keypoints0.size() < 15)
CV_Error(CV_StsAssert, "Detector gives too few points in a test image\n"); CV_Error(Error::StsAssert, "Detector gives too few points in a test image\n");
for(int scaleIdx = 1; scaleIdx <= 3; scaleIdx++) for(int scaleIdx = 1; scaleIdx <= 3; scaleIdx++)
{ {
@ -444,7 +444,7 @@ protected:
featureDetector->detect(image1, keypoints1); featureDetector->detect(image1, keypoints1);
removeVerySmallKeypoints(keypoints1); removeVerySmallKeypoints(keypoints1);
if(keypoints1.size() < 15) if(keypoints1.size() < 15)
CV_Error(CV_StsAssert, "Detector gives too few points in a test image\n"); CV_Error(Error::StsAssert, "Detector gives too few points in a test image\n");
if(keypoints1.size() > keypoints0.size()) if(keypoints1.size() > keypoints0.size())
{ {
@ -553,7 +553,7 @@ protected:
featureDetector->detect(image0, keypoints0); featureDetector->detect(image0, keypoints0);
removeVerySmallKeypoints(keypoints0); removeVerySmallKeypoints(keypoints0);
if(keypoints0.size() < 15) if(keypoints0.size() < 15)
CV_Error(CV_StsAssert, "Detector gives too few points in a test image\n"); CV_Error(Error::StsAssert, "Detector gives too few points in a test image\n");
Mat descriptors0; Mat descriptors0;
descriptorExtractor->compute(image0, keypoints0, descriptors0); descriptorExtractor->compute(image0, keypoints0, descriptors0);

View File

@ -191,7 +191,7 @@ Detects objects of different sizes in the input image. The detected objects are
.. ocv:function:: void CascadeClassifier::detectMultiScale( const Mat& image, vector<Rect>& objects, double scaleFactor=1.1, int minNeighbors=3, int flags=0, Size minSize=Size(), Size maxSize=Size()) .. ocv:function:: void CascadeClassifier::detectMultiScale( const Mat& image, vector<Rect>& objects, double scaleFactor=1.1, int minNeighbors=3, int flags=0, Size minSize=Size(), Size maxSize=Size())
.. ocv:pyfunction:: cv2.CascadeClassifier.detectMultiScale(image[, scaleFactor[, minNeighbors[, flags[, minSize[, maxSize]]]]]) -> objects .. ocv:pyfunction:: cv2.CascadeClassifier.detectMultiScale(image[, scaleFactor[, minNeighbors[, flags[, minSize[, maxSize]]]]]) -> objects
.. ocv:pyfunction:: cv2.CascadeClassifier.detectMultiScale(image, rejectLevels, levelWeights[, scaleFactor[, minNeighbors[, flags[, minSize[, maxSize[, outputRejectLevels]]]]]]) -> objects .. ocv:pyfunction:: cv2.CascadeClassifier.detectMultiScale(image[, scaleFactor[, minNeighbors[, flags[, minSize[, maxSize[, outputRejectLevels]]]]]]) -> objects, rejectLevels, levelWeights
.. ocv:cfunction:: CvSeq* cvHaarDetectObjects( const CvArr* image, CvHaarClassifierCascade* cascade, CvMemStorage* storage, double scale_factor=1.1, int min_neighbors=3, int flags=0, CvSize min_size=cvSize(0,0), CvSize max_size=cvSize(0,0) ) .. ocv:cfunction:: CvSeq* cvHaarDetectObjects( const CvArr* image, CvHaarClassifierCascade* cascade, CvMemStorage* storage, double scale_factor=1.1, int min_neighbors=3, int flags=0, CvSize min_size=cvSize(0,0), CvSize max_size=cvSize(0,0) )
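For orientation, a minimal sketch of calling the C++ detectMultiScale overload documented above, using the post-split headers and constants from this commit; the cascade file and input image names are placeholders, not part of the change:

    #include "opencv2/objdetect.hpp"
    #include "opencv2/imgproc.hpp"
    #include "opencv2/highgui.hpp"
    #include <vector>

    int main()
    {
        // Placeholder model and image: any frontal-face cascade and test photo will do.
        cv::CascadeClassifier cascade("haarcascade_frontalface_alt.xml");
        cv::Mat img = cv::imread("people.jpg"), gray;
        if (cascade.empty() || img.empty()) return 1;
        cv::cvtColor(img, gray, cv::COLOR_BGR2GRAY);   // new-style COLOR_ constant
        cv::equalizeHist(gray, gray);
        std::vector<cv::Rect> objects;
        cascade.detectMultiScale(gray, objects, 1.1, 3, 0, cv::Size(30, 30));
        return 0;
    }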

View File

@ -389,7 +389,7 @@ int showRootFilterBoxes(IplImage *image,
color, thickness, line_type, shift); color, thickness, line_type, shift);
} }
#ifdef HAVE_OPENCV_HIGHGUI #ifdef HAVE_OPENCV_HIGHGUI
cvShowImage("Initial image", image); cv::imshow("Initial image", cv::cvarrToMat(image));
#endif #endif
return LATENT_SVM_OK; return LATENT_SVM_OK;
} }
@ -445,7 +445,7 @@ int showPartFilterBoxes(IplImage *image,
} }
} }
#ifdef HAVE_OPENCV_HIGHGUI #ifdef HAVE_OPENCV_HIGHGUI
cvShowImage("Initial image", image); cv::imshow("Initial image", cv::cvarrToMat(image));
#endif #endif
return LATENT_SVM_OK; return LATENT_SVM_OK;
} }
@ -481,7 +481,7 @@ int showBoxes(IplImage *img,
color, thickness, line_type, shift); color, thickness, line_type, shift);
} }
#ifdef HAVE_OPENCV_HIGHGUI #ifdef HAVE_OPENCV_HIGHGUI
cvShowImage("Initial image", img); cv::imshow("Initial image", cv::cvarrToMat(img));
#endif #endif
return LATENT_SVM_OK; return LATENT_SVM_OK;
} }

View File

@ -434,7 +434,7 @@ int CV_CascadeDetectorTest::detectMultiScale_C( const string& filename,
return cvtest::TS::FAIL_INVALID_TEST_DATA; return cvtest::TS::FAIL_INVALID_TEST_DATA;
} }
Mat grayImg; Mat grayImg;
cvtColor( img, grayImg, CV_BGR2GRAY ); cvtColor( img, grayImg, COLOR_BGR2GRAY );
equalizeHist( grayImg, grayImg ); equalizeHist( grayImg, grayImg );
CvMat c_gray = grayImg; CvMat c_gray = grayImg;
@ -469,7 +469,7 @@ int CV_CascadeDetectorTest::detectMultiScale( int di, const Mat& img,
return cvtest::TS::FAIL_INVALID_TEST_DATA; return cvtest::TS::FAIL_INVALID_TEST_DATA;
} }
Mat grayImg; Mat grayImg;
cvtColor( img, grayImg, CV_BGR2GRAY ); cvtColor( img, grayImg, COLOR_BGR2GRAY );
equalizeHist( grayImg, grayImg ); equalizeHist( grayImg, grayImg );
cascade.detectMultiScale( grayImg, objects, 1.1, 3, flags[di] ); cascade.detectMultiScale( grayImg, objects, 1.1, 3, flags[di] );
return cvtest::TS::OK; return cvtest::TS::OK;

View File

@ -82,8 +82,9 @@ void CV_LatentSVMDetectorTest::run( int /* start_from */)
init.initialize(numThreads); init.initialize(numThreads);
#endif #endif
IplImage* image = cvLoadImage(img_path.c_str()); Mat image2 = cv::imread(img_path.c_str());
if (!image) IplImage image = image2;
if (image2.empty())
{ {
ts->set_failed_test_info( cvtest::TS::FAIL_INVALID_TEST_DATA ); ts->set_failed_test_info( cvtest::TS::FAIL_INVALID_TEST_DATA );
return; return;
@ -93,13 +94,12 @@ void CV_LatentSVMDetectorTest::run( int /* start_from */)
if (!detector) if (!detector)
{ {
ts->set_failed_test_info( cvtest::TS::FAIL_INVALID_TEST_DATA ); ts->set_failed_test_info( cvtest::TS::FAIL_INVALID_TEST_DATA );
cvReleaseImage(&image);
return; return;
} }
CvMemStorage* storage = cvCreateMemStorage(0); CvMemStorage* storage = cvCreateMemStorage(0);
CvSeq* detections = 0; CvSeq* detections = 0;
detections = cvLatentSvmDetectObjects(image, detector, storage, 0.5f, numThreads); detections = cvLatentSvmDetectObjects(&image, detector, storage, 0.5f, numThreads);
if (detections->total != num_detections) if (detections->total != num_detections)
{ {
ts->set_failed_test_info( cvtest::TS::FAIL_MISMATCH ); ts->set_failed_test_info( cvtest::TS::FAIL_MISMATCH );
@ -124,7 +124,6 @@ void CV_LatentSVMDetectorTest::run( int /* start_from */)
#endif #endif
cvReleaseMemStorage( &storage ); cvReleaseMemStorage( &storage );
cvReleaseLatentSvmDetector( &detector ); cvReleaseLatentSvmDetector( &detector );
cvReleaseImage( &image );
} }
// Test for c++ version of Latent SVM // Test for c++ version of Latent SVM

View File

@ -46,14 +46,14 @@
#ifndef __OPENCV_OCL_PRIVATE_UTIL__ #ifndef __OPENCV_OCL_PRIVATE_UTIL__
#define __OPENCV_OCL_PRIVATE_UTIL__ #define __OPENCV_OCL_PRIVATE_UTIL__
#include "opencv2/ocl.hpp"
#if defined __APPLE__ #if defined __APPLE__
#include <OpenCL/OpenCL.h> #include <OpenCL/OpenCL.h>
#else #else
#include <CL/opencl.h> #include <CL/opencl.h>
#endif #endif
#include "opencv2/ocl.hpp"
namespace cv namespace cv
{ {
namespace ocl namespace ocl

View File

@ -47,7 +47,7 @@
///////////// Canny //////////////////////// ///////////// Canny ////////////////////////
TEST(Canny) TEST(Canny)
{ {
Mat img = imread(abspath("aloeL.jpg"), CV_LOAD_IMAGE_GRAYSCALE); Mat img = imread(abspath("aloeL.jpg"), IMREAD_GRAYSCALE);
if (img.empty()) if (img.empty())
{ {

View File

@ -60,26 +60,26 @@ TEST(cvtColor)
gen(src, size, size, all_type[j], 0, 256); gen(src, size, size, all_type[j], 0, 256);
SUBTEST << size << "x" << size << "; " << type_name[j] << " ; CV_RGBA2GRAY"; SUBTEST << size << "x" << size << "; " << type_name[j] << " ; CV_RGBA2GRAY";
cvtColor(src, dst, CV_RGBA2GRAY, 4); cvtColor(src, dst, COLOR_RGBA2GRAY, 4);
CPU_ON; CPU_ON;
cvtColor(src, dst, CV_RGBA2GRAY, 4); cvtColor(src, dst, COLOR_RGBA2GRAY, 4);
CPU_OFF; CPU_OFF;
d_src.upload(src); d_src.upload(src);
WARMUP_ON; WARMUP_ON;
ocl::cvtColor(d_src, d_dst, CV_RGBA2GRAY, 4); ocl::cvtColor(d_src, d_dst, COLOR_RGBA2GRAY, 4);
WARMUP_OFF; WARMUP_OFF;
GPU_ON; GPU_ON;
ocl::cvtColor(d_src, d_dst, CV_RGBA2GRAY, 4); ocl::cvtColor(d_src, d_dst, COLOR_RGBA2GRAY, 4);
; ;
GPU_OFF; GPU_OFF;
GPU_FULL_ON; GPU_FULL_ON;
d_src.upload(src); d_src.upload(src);
ocl::cvtColor(d_src, d_dst, CV_RGBA2GRAY, 4); ocl::cvtColor(d_src, d_dst, COLOR_RGBA2GRAY, 4);
d_dst.download(dst); d_dst.download(dst);
GPU_FULL_OFF; GPU_FULL_OFF;
} }

View File

@ -84,7 +84,7 @@ public:
} }
TEST(Haar) TEST(Haar)
{ {
Mat img = imread(abspath("basketball1.png"), CV_LOAD_IMAGE_GRAYSCALE); Mat img = imread(abspath("basketball1.png"), IMREAD_GRAYSCALE);
if (img.empty()) if (img.empty())
{ {

View File

@ -75,10 +75,10 @@ TEST(matchTemplate)
gen(templ, templ_size, templ_size, all_type[j], 0, 1); gen(templ, templ_size, templ_size, all_type[j], 0, 1);
matchTemplate(src, templ, dst, CV_TM_CCORR); matchTemplate(src, templ, dst, TM_CCORR);
CPU_ON; CPU_ON;
matchTemplate(src, templ, dst, CV_TM_CCORR); matchTemplate(src, templ, dst, TM_CCORR);
CPU_OFF; CPU_OFF;
ocl::oclMat d_src(src), d_templ, d_dst; ocl::oclMat d_src(src), d_templ, d_dst;
@ -86,18 +86,18 @@ TEST(matchTemplate)
d_templ.upload(templ); d_templ.upload(templ);
WARMUP_ON; WARMUP_ON;
ocl::matchTemplate(d_src, d_templ, d_dst, CV_TM_CCORR); ocl::matchTemplate(d_src, d_templ, d_dst, TM_CCORR);
WARMUP_OFF; WARMUP_OFF;
GPU_ON; GPU_ON;
ocl::matchTemplate(d_src, d_templ, d_dst, CV_TM_CCORR); ocl::matchTemplate(d_src, d_templ, d_dst, TM_CCORR);
; ;
GPU_OFF; GPU_OFF;
GPU_FULL_ON; GPU_FULL_ON;
d_src.upload(src); d_src.upload(src);
d_templ.upload(templ); d_templ.upload(templ);
ocl::matchTemplate(d_src, d_templ, d_dst, CV_TM_CCORR); ocl::matchTemplate(d_src, d_templ, d_dst, TM_CCORR);
d_dst.download(dst); d_dst.download(dst);
GPU_FULL_OFF; GPU_FULL_OFF;
} }
@ -116,28 +116,28 @@ TEST(matchTemplate)
gen(templ, templ_size, templ_size, all_type_8U[j], 0, 255); gen(templ, templ_size, templ_size, all_type_8U[j], 0, 255);
matchTemplate(src, templ, dst, CV_TM_CCORR_NORMED); matchTemplate(src, templ, dst, TM_CCORR_NORMED);
CPU_ON; CPU_ON;
matchTemplate(src, templ, dst, CV_TM_CCORR_NORMED); matchTemplate(src, templ, dst, TM_CCORR_NORMED);
CPU_OFF; CPU_OFF;
ocl::oclMat d_src(src); ocl::oclMat d_src(src);
ocl::oclMat d_templ(templ), d_dst; ocl::oclMat d_templ(templ), d_dst;
WARMUP_ON; WARMUP_ON;
ocl::matchTemplate(d_src, d_templ, d_dst, CV_TM_CCORR_NORMED); ocl::matchTemplate(d_src, d_templ, d_dst, TM_CCORR_NORMED);
WARMUP_OFF; WARMUP_OFF;
GPU_ON; GPU_ON;
ocl::matchTemplate(d_src, d_templ, d_dst, CV_TM_CCORR_NORMED); ocl::matchTemplate(d_src, d_templ, d_dst, TM_CCORR_NORMED);
; ;
GPU_OFF; GPU_OFF;
GPU_FULL_ON; GPU_FULL_ON;
d_src.upload(src); d_src.upload(src);
d_templ.upload(templ); d_templ.upload(templ);
ocl::matchTemplate(d_src, d_templ, d_dst, CV_TM_CCORR_NORMED); ocl::matchTemplate(d_src, d_templ, d_dst, TM_CCORR_NORMED);
d_dst.download(dst); d_dst.download(dst);
GPU_FULL_OFF; GPU_FULL_OFF;
} }
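Outside the benchmarking harness above, the same call pattern with the new TM_ constants reduces to a short sketch (image file names here are placeholders):

    #include "opencv2/imgproc.hpp"
    #include "opencv2/highgui.hpp"

    int main()
    {
        cv::Mat img   = cv::imread("scene.png", cv::IMREAD_GRAYSCALE);  // placeholder input
        cv::Mat templ = cv::imread("patch.png", cv::IMREAD_GRAYSCALE);  // placeholder template
        if (img.empty() || templ.empty()) return 1;
        cv::Mat result;
        cv::matchTemplate(img, templ, result, cv::TM_CCORR_NORMED);     // C++ enum, no CV_ prefix
        double maxVal; cv::Point maxLoc;
        cv::minMaxLoc(result, 0, &maxVal, 0, &maxLoc);                  // best match score and position
        return 0;
    }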

View File

@ -44,6 +44,9 @@
//M*/ //M*/
#include "precomp.hpp" #include "precomp.hpp"
using namespace cv;
#ifdef HAVE_OPENCL #ifdef HAVE_OPENCL
//#define MAT_DEBUG //#define MAT_DEBUG
@ -181,13 +184,13 @@ INSTANTIATE_TEST_CASE_P(OCL_ImgProc, CvtColor, testing::Combine(
INSTANTIATE_TEST_CASE_P(OCL_ImgProc, CvtColor_YUV420, testing::Combine( INSTANTIATE_TEST_CASE_P(OCL_ImgProc, CvtColor_YUV420, testing::Combine(
testing::Values(cv::Size(128, 45), cv::Size(46, 132), cv::Size(1024, 1023)), testing::Values(cv::Size(128, 45), cv::Size(46, 132), cv::Size(1024, 1023)),
testing::Values((int)CV_YUV2RGBA_NV12, (int)CV_YUV2BGRA_NV12, (int)CV_YUV2RGB_NV12, (int)CV_YUV2BGR_NV12) testing::Values((int)COLOR_YUV2RGBA_NV12, (int)COLOR_YUV2BGRA_NV12, (int)COLOR_YUV2RGB_NV12, (int)COLOR_YUV2BGR_NV12)
)); ));
INSTANTIATE_TEST_CASE_P(OCL_ImgProc, CvtColor_Gray2RGB, testing::Combine( INSTANTIATE_TEST_CASE_P(OCL_ImgProc, CvtColor_Gray2RGB, testing::Combine(
DIFFERENT_SIZES, DIFFERENT_SIZES,
testing::Values(MatDepth(CV_8U), MatDepth(CV_16U), MatDepth(CV_32F)), testing::Values(MatDepth(CV_8U), MatDepth(CV_16U), MatDepth(CV_32F)),
testing::Values((int)CV_GRAY2BGR, (int)CV_GRAY2BGRA, (int)CV_GRAY2RGB, (int)CV_GRAY2RGBA) testing::Values((int)COLOR_GRAY2BGR, (int)COLOR_GRAY2BGRA, (int)COLOR_GRAY2RGB, (int)COLOR_GRAY2RGBA)
)); ));
} }
#endif #endif

View File

@ -121,7 +121,7 @@ TEST_F(Haar, FaceDetect)
Mat gray, smallImg(cvRound (img.rows / scale), cvRound(img.cols / scale), CV_8UC1 ); Mat gray, smallImg(cvRound (img.rows / scale), cvRound(img.cols / scale), CV_8UC1 );
MemStorage storage(cvCreateMemStorage(0)); MemStorage storage(cvCreateMemStorage(0));
cvtColor( img, gray, CV_BGR2GRAY ); cvtColor( img, gray, COLOR_BGR2GRAY );
resize( gray, smallImg, smallImg.size(), 0, 0, INTER_LINEAR ); resize( gray, smallImg, smallImg.size(), 0, 0, INTER_LINEAR );
equalizeHist( smallImg, smallImg ); equalizeHist( smallImg, smallImg );

View File

@ -44,7 +44,7 @@
//M*/ //M*/
#include "precomp.hpp" #include "precomp.hpp"
#include "opencv2/core.hpp"
using namespace std; using namespace std;
#ifdef HAVE_OPENCL #ifdef HAVE_OPENCL
@ -71,11 +71,11 @@ TEST_P(HOG, GetDescriptors)
switch (type) switch (type)
{ {
case CV_8UC1: case CV_8UC1:
cv::cvtColor(img_rgb, img, CV_BGR2GRAY); cv::cvtColor(img_rgb, img, cv::COLOR_BGR2GRAY);
break; break;
case CV_8UC4: case CV_8UC4:
default: default:
cv::cvtColor(img_rgb, img, CV_BGR2BGRA); cv::cvtColor(img_rgb, img, cv::COLOR_BGR2BGRA);
break; break;
} }
cv::ocl::oclMat d_img(img); cv::ocl::oclMat d_img(img);
@ -128,11 +128,11 @@ TEST_P(HOG, Detect)
switch (type) switch (type)
{ {
case CV_8UC1: case CV_8UC1:
cv::cvtColor(img_rgb, img, CV_BGR2GRAY); cv::cvtColor(img_rgb, img, cv::COLOR_BGR2GRAY);
break; break;
case CV_8UC4: case CV_8UC4:
default: default:
cv::cvtColor(img_rgb, img, CV_BGR2BGRA); cv::cvtColor(img_rgb, img, cv::COLOR_BGR2BGRA);
break; break;
} }
cv::ocl::oclMat d_img(img); cv::ocl::oclMat d_img(img);

View File

@ -245,7 +245,7 @@ double checkNorm(const Mat &m1, const Mat &m2)
double checkSimilarity(const Mat &m1, const Mat &m2) double checkSimilarity(const Mat &m1, const Mat &m2)
{ {
Mat diff; Mat diff;
matchTemplate(m1, m2, diff, CV_TM_CCORR_NORMED); matchTemplate(m1, m2, diff, TM_CCORR_NORMED);
return std::abs(diff.at<float>(0, 0) - 1.f); return std::abs(diff.at<float>(0, 0) - 1.f);
} }

View File

@ -46,10 +46,6 @@
#include "opencv2/core.hpp" #include "opencv2/core.hpp"
#include "opencv2/imgproc.hpp" #include "opencv2/imgproc.hpp"
#include "opencv2/photo/photo_c.h"
#ifdef __cplusplus
/*! \namespace cv /*! \namespace cv
Namespace where all the C++ OpenCV functionality resides Namespace where all the C++ OpenCV functionality resides
*/ */
@ -59,8 +55,8 @@ namespace cv
//! the inpainting algorithm //! the inpainting algorithm
enum enum
{ {
INPAINT_NS=CV_INPAINT_NS, // Navier-Stokes algorithm INPAINT_NS = 0, // Navier-Stokes algorithm
INPAINT_TELEA=CV_INPAINT_TELEA // A. Telea algorithm INPAINT_TELEA = 1 // A. Telea algorithm
}; };
//! restores the damaged image areas using one of the available intpainting algorithms //! restores the damaged image areas using one of the available intpainting algorithms
@ -84,8 +80,6 @@ CV_EXPORTS_W void fastNlMeansDenoisingColoredMulti( InputArrayOfArrays srcImgs,
float h = 3, float hColor = 3, float h = 3, float hColor = 3,
int templateWindowSize = 7, int searchWindowSize = 21); int templateWindowSize = 7, int searchWindowSize = 21);
} } // cv
#endif //__cplusplus
#endif #endif
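A minimal sketch of the C++ inpainting API declared above, using the new plain enum values instead of the CV_INPAINT_ macros; the file names are placeholders:

    #include "opencv2/photo.hpp"
    #include "opencv2/highgui.hpp"

    int main()
    {
        // Placeholder inputs: a damaged image and an 8-bit mask marking the damaged pixels.
        cv::Mat src  = cv::imread("damaged.png");
        cv::Mat mask = cv::imread("mask.png", cv::IMREAD_GRAYSCALE);
        if (src.empty() || mask.empty()) return 1;
        cv::Mat restored;
        cv::inpaint(src, mask, restored, 3 /* radius */, cv::INPAINT_TELEA);
        cv::imwrite("restored.png", restored);
        return 0;
    }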

View File

@ -74,7 +74,7 @@ void cv::fastNlMeansDenoising( InputArray _src, OutputArray _dst, float h,
src, dst, templateWindowSize, searchWindowSize, h)); src, dst, templateWindowSize, searchWindowSize, h));
break; break;
default: default:
CV_Error(CV_StsBadArg, CV_Error(Error::StsBadArg,
"Unsupported image format! Only CV_8UC1, CV_8UC2 and CV_8UC3 are supported"); "Unsupported image format! Only CV_8UC1, CV_8UC2 and CV_8UC3 are supported");
} }
} }
@ -88,12 +88,12 @@ void cv::fastNlMeansDenoisingColored( InputArray _src, OutputArray _dst,
Mat dst = _dst.getMat(); Mat dst = _dst.getMat();
if (src.type() != CV_8UC3) { if (src.type() != CV_8UC3) {
CV_Error(CV_StsBadArg, "Type of input image should be CV_8UC3!"); CV_Error(Error::StsBadArg, "Type of input image should be CV_8UC3!");
return; return;
} }
Mat src_lab; Mat src_lab;
cvtColor(src, src_lab, CV_LBGR2Lab); cvtColor(src, src_lab, COLOR_LBGR2Lab);
Mat l(src.size(), CV_8U); Mat l(src.size(), CV_8U);
Mat ab(src.size(), CV_8UC2); Mat ab(src.size(), CV_8UC2);
@ -108,7 +108,7 @@ void cv::fastNlMeansDenoisingColored( InputArray _src, OutputArray _dst,
Mat dst_lab(src.size(), src.type()); Mat dst_lab(src.size(), src.type());
mixChannels(l_ab_denoised, 2, &dst_lab, 1, from_to, 3); mixChannels(l_ab_denoised, 2, &dst_lab, 1, from_to, 3);
cvtColor(dst_lab, dst, CV_Lab2LBGR); cvtColor(dst_lab, dst, COLOR_Lab2LBGR);
} }
static void fastNlMeansDenoisingMultiCheckPreconditions( static void fastNlMeansDenoisingMultiCheckPreconditions(
@ -118,27 +118,27 @@ static void fastNlMeansDenoisingMultiCheckPreconditions(
{ {
int src_imgs_size = (int)srcImgs.size(); int src_imgs_size = (int)srcImgs.size();
if (src_imgs_size == 0) { if (src_imgs_size == 0) {
CV_Error(CV_StsBadArg, "Input images vector should not be empty!"); CV_Error(Error::StsBadArg, "Input images vector should not be empty!");
} }
if (temporalWindowSize % 2 == 0 || if (temporalWindowSize % 2 == 0 ||
searchWindowSize % 2 == 0 || searchWindowSize % 2 == 0 ||
templateWindowSize % 2 == 0) { templateWindowSize % 2 == 0) {
CV_Error(CV_StsBadArg, "All windows sizes should be odd!"); CV_Error(Error::StsBadArg, "All windows sizes should be odd!");
} }
int temporalWindowHalfSize = temporalWindowSize / 2; int temporalWindowHalfSize = temporalWindowSize / 2;
if (imgToDenoiseIndex - temporalWindowHalfSize < 0 || if (imgToDenoiseIndex - temporalWindowHalfSize < 0 ||
imgToDenoiseIndex + temporalWindowHalfSize >= src_imgs_size) imgToDenoiseIndex + temporalWindowHalfSize >= src_imgs_size)
{ {
CV_Error(CV_StsBadArg, CV_Error(Error::StsBadArg,
"imgToDenoiseIndex and temporalWindowSize " "imgToDenoiseIndex and temporalWindowSize "
"should be choosen corresponding srcImgs size!"); "should be choosen corresponding srcImgs size!");
} }
for (int i = 1; i < src_imgs_size; i++) { for (int i = 1; i < src_imgs_size; i++) {
if (srcImgs[0].size() != srcImgs[i].size() || srcImgs[0].type() != srcImgs[i].type()) { if (srcImgs[0].size() != srcImgs[i].size() || srcImgs[0].type() != srcImgs[i].type()) {
CV_Error(CV_StsBadArg, "Input images should have the same size and type!"); CV_Error(Error::StsBadArg, "Input images should have the same size and type!");
} }
} }
} }
@ -177,7 +177,7 @@ void cv::fastNlMeansDenoisingMulti( InputArrayOfArrays _srcImgs, OutputArray _ds
dst, templateWindowSize, searchWindowSize, h)); dst, templateWindowSize, searchWindowSize, h));
break; break;
default: default:
CV_Error(CV_StsBadArg, CV_Error(Error::StsBadArg,
"Unsupported matrix format! Only uchar, Vec2b, Vec3b are supported"); "Unsupported matrix format! Only uchar, Vec2b, Vec3b are supported");
} }
} }
@ -201,7 +201,7 @@ void cv::fastNlMeansDenoisingColoredMulti( InputArrayOfArrays _srcImgs, OutputAr
int src_imgs_size = (int)srcImgs.size(); int src_imgs_size = (int)srcImgs.size();
if (srcImgs[0].type() != CV_8UC3) { if (srcImgs[0].type() != CV_8UC3) {
CV_Error(CV_StsBadArg, "Type of input images should be CV_8UC3!"); CV_Error(Error::StsBadArg, "Type of input images should be CV_8UC3!");
return; return;
} }
@ -215,7 +215,7 @@ void cv::fastNlMeansDenoisingColoredMulti( InputArrayOfArrays _srcImgs, OutputAr
src_lab[i] = Mat::zeros(srcImgs[0].size(), CV_8UC3); src_lab[i] = Mat::zeros(srcImgs[0].size(), CV_8UC3);
l[i] = Mat::zeros(srcImgs[0].size(), CV_8UC1); l[i] = Mat::zeros(srcImgs[0].size(), CV_8UC1);
ab[i] = Mat::zeros(srcImgs[0].size(), CV_8UC2); ab[i] = Mat::zeros(srcImgs[0].size(), CV_8UC2);
cvtColor(srcImgs[i], src_lab[i], CV_LBGR2Lab); cvtColor(srcImgs[i], src_lab[i], COLOR_LBGR2Lab);
Mat l_ab[] = { l[i], ab[i] }; Mat l_ab[] = { l[i], ab[i] };
mixChannels(&src_lab[i], 1, l_ab, 2, from_to, 3); mixChannels(&src_lab[i], 1, l_ab, 2, from_to, 3);
@ -236,7 +236,7 @@ void cv::fastNlMeansDenoisingColoredMulti( InputArrayOfArrays _srcImgs, OutputAr
Mat dst_lab(srcImgs[0].size(), srcImgs[0].type()); Mat dst_lab(srcImgs[0].size(), srcImgs[0].type());
mixChannels(l_ab_denoised, 2, &dst_lab, 1, from_to, 3); mixChannels(l_ab_denoised, 2, &dst_lab, 1, from_to, 3);
cvtColor(dst_lab, dst, CV_Lab2LBGR); cvtColor(dst_lab, dst, COLOR_Lab2LBGR);
} }
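As a quick illustration of the denoising entry points touched above, a hedged usage sketch with the C++ constants only (the input file name is a placeholder):

    #include "opencv2/photo.hpp"
    #include "opencv2/highgui.hpp"

    int main()
    {
        cv::Mat noisy = cv::imread("noisy.png", cv::IMREAD_COLOR);  // placeholder CV_8UC3 input
        if (noisy.empty()) return 1;
        cv::Mat denoised;
        // h / hColor control filter strength; 7 and 21 are the default window sizes.
        cv::fastNlMeansDenoisingColored(noisy, denoised, 3.0f, 3.0f, 7, 21);
        cv::imwrite("denoised.png", denoised);
        return 0;
    }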

View File

@ -47,6 +47,7 @@
#include "precomp.hpp" #include "precomp.hpp"
#include "opencv2/imgproc/imgproc_c.h" #include "opencv2/imgproc/imgproc_c.h"
#include "opencv2/photo/photo_c.h"
#undef CV_MAT_ELEM_PTR_FAST #undef CV_MAT_ELEM_PTR_FAST
#define CV_MAT_ELEM_PTR_FAST( mat, row, col, pix_size ) \ #define CV_MAT_ELEM_PTR_FAST( mat, row, col, pix_size ) \
@ -716,6 +717,12 @@ icvNSInpaintFMM(const CvMat *f, CvMat *t, CvMat *out, int range, CvPriorityQueue
}\ }\
} }
namespace cv {
template<> void cv::Ptr<IplConvKernel>::delete_obj()
{
cvReleaseStructuringElement(&obj);
}
}
void void
cvInpaint( const CvArr* _input_img, const CvArr* _inpaint_mask, CvArr* _output_img, cvInpaint( const CvArr* _input_img, const CvArr* _inpaint_mask, CvArr* _output_img,
@ -776,7 +783,7 @@ cvInpaint( const CvArr* _input_img, const CvArr* _inpaint_mask, CvArr* _output_i
cvSet(f,cvScalar(INSIDE,0,0,0),mask); cvSet(f,cvScalar(INSIDE,0,0,0),mask);
cvSet(t,cvScalar(0,0,0,0),band); cvSet(t,cvScalar(0,0,0,0),band);
if( flags == CV_INPAINT_TELEA ) if( flags == cv::INPAINT_TELEA )
{ {
out = cvCreateMat(erows, ecols, CV_8UC1); out = cvCreateMat(erows, ecols, CV_8UC1);
el_range = cvCreateStructuringElementEx(2*range+1,2*range+1, el_range = cvCreateStructuringElementEx(2*range+1,2*range+1,
@ -793,10 +800,10 @@ cvInpaint( const CvArr* _input_img, const CvArr* _inpaint_mask, CvArr* _output_i
icvCalcFMM(out,t,Out,true); icvCalcFMM(out,t,Out,true);
icvTeleaInpaintFMM(mask,t,output_img,range,Heap); icvTeleaInpaintFMM(mask,t,output_img,range,Heap);
} }
else if (flags == CV_INPAINT_NS) { else if (flags == cv::INPAINT_NS) {
icvNSInpaintFMM(mask,t,output_img,range,Heap); icvNSInpaintFMM(mask,t,output_img,range,Heap);
} else { } else {
CV_Error( CV_StsBadArg, "The flags argument must be one of CV_INPAINT_TELEA or CV_INPAINT_NS" ); CV_Error( cv::Error::StsBadArg, "The flags argument must be one of CV_INPAINT_TELEA or CV_INPAINT_NS" );
} }
} }

View File

@ -62,8 +62,8 @@ TEST(Photo_DenoisingGrayscale, regression)
string original_path = folder + "lena_noised_gaussian_sigma=10.png"; string original_path = folder + "lena_noised_gaussian_sigma=10.png";
string expected_path = folder + "lena_noised_denoised_grayscale_tw=7_sw=21_h=10.png"; string expected_path = folder + "lena_noised_denoised_grayscale_tw=7_sw=21_h=10.png";
Mat original = imread(original_path, CV_LOAD_IMAGE_GRAYSCALE); Mat original = imread(original_path, IMREAD_GRAYSCALE);
Mat expected = imread(expected_path, CV_LOAD_IMAGE_GRAYSCALE); Mat expected = imread(expected_path, IMREAD_GRAYSCALE);
ASSERT_FALSE(original.empty()) << "Could not load input image " << original_path; ASSERT_FALSE(original.empty()) << "Could not load input image " << original_path;
ASSERT_FALSE(expected.empty()) << "Could not load reference image " << expected_path; ASSERT_FALSE(expected.empty()) << "Could not load reference image " << expected_path;
@ -82,8 +82,8 @@ TEST(Photo_DenoisingColored, regression)
string original_path = folder + "lena_noised_gaussian_sigma=10.png"; string original_path = folder + "lena_noised_gaussian_sigma=10.png";
string expected_path = folder + "lena_noised_denoised_lab12_tw=7_sw=21_h=10_h2=10.png"; string expected_path = folder + "lena_noised_denoised_lab12_tw=7_sw=21_h=10_h2=10.png";
Mat original = imread(original_path, CV_LOAD_IMAGE_COLOR); Mat original = imread(original_path, IMREAD_COLOR);
Mat expected = imread(expected_path, CV_LOAD_IMAGE_COLOR); Mat expected = imread(expected_path, IMREAD_COLOR);
ASSERT_FALSE(original.empty()) << "Could not load input image " << original_path; ASSERT_FALSE(original.empty()) << "Could not load input image " << original_path;
ASSERT_FALSE(expected.empty()) << "Could not load reference image " << expected_path; ASSERT_FALSE(expected.empty()) << "Could not load reference image " << expected_path;
@ -102,14 +102,14 @@ TEST(Photo_DenoisingGrayscaleMulti, regression)
string folder = string(cvtest::TS::ptr()->get_data_path()) + "denoising/"; string folder = string(cvtest::TS::ptr()->get_data_path()) + "denoising/";
string expected_path = folder + "lena_noised_denoised_multi_tw=7_sw=21_h=15.png"; string expected_path = folder + "lena_noised_denoised_multi_tw=7_sw=21_h=15.png";
Mat expected = imread(expected_path, CV_LOAD_IMAGE_GRAYSCALE); Mat expected = imread(expected_path, IMREAD_GRAYSCALE);
ASSERT_FALSE(expected.empty()) << "Could not load reference image " << expected_path; ASSERT_FALSE(expected.empty()) << "Could not load reference image " << expected_path;
vector<Mat> original(imgs_count); vector<Mat> original(imgs_count);
for (int i = 0; i < imgs_count; i++) for (int i = 0; i < imgs_count; i++)
{ {
string original_path = format("%slena_noised_gaussian_sigma=20_multi_%d.png", folder.c_str(), i); string original_path = format("%slena_noised_gaussian_sigma=20_multi_%d.png", folder.c_str(), i);
original[i] = imread(original_path, CV_LOAD_IMAGE_GRAYSCALE); original[i] = imread(original_path, IMREAD_GRAYSCALE);
ASSERT_FALSE(original[i].empty()) << "Could not load input image " << original_path; ASSERT_FALSE(original[i].empty()) << "Could not load input image " << original_path;
} }
@ -127,14 +127,14 @@ TEST(Photo_DenoisingColoredMulti, regression)
string folder = string(cvtest::TS::ptr()->get_data_path()) + "denoising/"; string folder = string(cvtest::TS::ptr()->get_data_path()) + "denoising/";
string expected_path = folder + "lena_noised_denoised_multi_lab12_tw=7_sw=21_h=10_h2=15.png"; string expected_path = folder + "lena_noised_denoised_multi_lab12_tw=7_sw=21_h=10_h2=15.png";
Mat expected = imread(expected_path, CV_LOAD_IMAGE_COLOR); Mat expected = imread(expected_path, IMREAD_COLOR);
ASSERT_FALSE(expected.empty()) << "Could not load reference image " << expected_path; ASSERT_FALSE(expected.empty()) << "Could not load reference image " << expected_path;
vector<Mat> original(imgs_count); vector<Mat> original(imgs_count);
for (int i = 0; i < imgs_count; i++) for (int i = 0; i < imgs_count; i++)
{ {
string original_path = format("%slena_noised_gaussian_sigma=20_multi_%d.png", folder.c_str(), i); string original_path = format("%slena_noised_gaussian_sigma=20_multi_%d.png", folder.c_str(), i);
original[i] = imread(original_path, CV_LOAD_IMAGE_COLOR); original[i] = imread(original_path, IMREAD_COLOR);
ASSERT_FALSE(original[i].empty()) << "Could not load input image " << original_path; ASSERT_FALSE(original[i].empty()) << "Could not load input image " << original_path;
} }

View File

@ -78,14 +78,14 @@ void CV_InpaintTest::run( int )
mask.convertTo(inv_mask, CV_8UC3, -1.0, 255.0); mask.convertTo(inv_mask, CV_8UC3, -1.0, 255.0);
Mat mask1ch; Mat mask1ch;
cv::cvtColor(mask, mask1ch, CV_BGR2GRAY); cv::cvtColor(mask, mask1ch, COLOR_BGR2GRAY);
Mat test = orig.clone(); Mat test = orig.clone();
test.setTo(Scalar::all(255), mask1ch); test.setTo(Scalar::all(255), mask1ch);
Mat res1, res2; Mat res1, res2;
inpaint( test, mask1ch, res1, 5, CV_INPAINT_NS ); inpaint( test, mask1ch, res1, 5, INPAINT_NS );
inpaint( test, mask1ch, res2, 5, CV_INPAINT_TELEA ); inpaint( test, mask1ch, res2, 5, INPAINT_TELEA );
Mat diff1, diff2; Mat diff1, diff2;
absdiff( orig, res1, diff1 ); absdiff( orig, res1, diff1 );

View File

@ -23,6 +23,9 @@
#include "opencv2/photo.hpp" #include "opencv2/photo.hpp"
#include "opencv2/highgui.hpp" #include "opencv2/highgui.hpp"
#include "opencv2/highgui/highgui_c.h"
#include "opencv2/photo/photo_c.h"
#include "opencv2/opencv_modules.hpp" #include "opencv2/opencv_modules.hpp"
#ifdef HAVE_OPENCV_NONFREE #ifdef HAVE_OPENCV_NONFREE

View File

@ -1,5 +1,6 @@
#include "opencv2/legacy.hpp" #include "opencv2/legacy.hpp"
#include "opencv2/legacy/compat.hpp" #include "opencv2/legacy/compat.hpp"
#include "opencv2/highgui/highgui_c.h"
#define OLD_MODULESTR "cv2.cv" #define OLD_MODULESTR "cv2.cv"

View File

@ -211,6 +211,7 @@ gen_template_rw_prop_init = Template("""
simple_argtype_mapping = { simple_argtype_mapping = {
"bool": ("bool", "b", "0"), "bool": ("bool", "b", "0"),
"char": ("char", "b", "0"),
"int": ("int", "i", "0"), "int": ("int", "i", "0"),
"float": ("float", "f", "0.f"), "float": ("float", "f", "0.f"),
"double": ("double", "d", "0"), "double": ("double", "d", "0"),

View File

@ -93,8 +93,8 @@ public:
double confThresh() const { return conf_thresh_; } double confThresh() const { return conf_thresh_; }
void setConfThresh(double conf_thresh) { conf_thresh_ = conf_thresh; } void setConfThresh(double conf_thresh) { conf_thresh_ = conf_thresh; }
CvTermCriteria termCriteria() { return term_criteria_; } TermCriteria termCriteria() { return term_criteria_; }
void setTermCriteria(const CvTermCriteria& term_criteria) { term_criteria_ = term_criteria; } void setTermCriteria(const TermCriteria& term_criteria) { term_criteria_ = term_criteria; }
protected: protected:
BundleAdjusterBase(int num_params_per_cam, int num_errs_per_measurement) BundleAdjusterBase(int num_params_per_cam, int num_errs_per_measurement)
@ -103,7 +103,7 @@ protected:
{ {
setRefinementMask(Mat::ones(3, 3, CV_8U)); setRefinementMask(Mat::ones(3, 3, CV_8U));
setConfThresh(1.); setConfThresh(1.);
setTermCriteria(cvTermCriteria(CV_TERMCRIT_EPS + CV_TERMCRIT_ITER, 1000, DBL_EPSILON)); setTermCriteria(TermCriteria(TermCriteria::EPS + TermCriteria::COUNT, 1000, DBL_EPSILON));
} }
// Runs bundle adjustment // Runs bundle adjustment
@ -132,7 +132,7 @@ protected:
double conf_thresh_; double conf_thresh_;
//LevenbergMarquardt algorithm termination criteria //LevenbergMarquardt algorithm termination criteria
CvTermCriteria term_criteria_; TermCriteria term_criteria_;
// Camera parameters matrix (CV_64F) // Camera parameters matrix (CV_64F)
Mat cam_params_; Mat cam_params_;

View File

@ -425,7 +425,7 @@ void normalizeUsingWeightMap(const Mat& weight, Mat& src)
void createWeightMap(const Mat &mask, float sharpness, Mat &weight) void createWeightMap(const Mat &mask, float sharpness, Mat &weight)
{ {
CV_Assert(mask.type() == CV_8U); CV_Assert(mask.type() == CV_8U);
distanceTransform(mask, weight, CV_DIST_L1, 3); distanceTransform(mask, weight, DIST_L1, 3);
threshold(weight * sharpness, weight, 1.f, 1.f, THRESH_TRUNC); threshold(weight * sharpness, weight, 1.f, 1.f, THRESH_TRUNC);
} }

View File

@ -350,7 +350,7 @@ void SurfFeaturesFinder::find(const Mat &image, ImageFeatures &features)
CV_Assert((image.type() == CV_8UC3) || (image.type() == CV_8UC1)); CV_Assert((image.type() == CV_8UC3) || (image.type() == CV_8UC1));
if(image.type() == CV_8UC3) if(image.type() == CV_8UC3)
{ {
cvtColor(image, gray_image, CV_BGR2GRAY); cvtColor(image, gray_image, COLOR_BGR2GRAY);
} }
else else
{ {
@ -382,9 +382,9 @@ void OrbFeaturesFinder::find(const Mat &image, ImageFeatures &features)
CV_Assert((image.type() == CV_8UC3) || (image.type() == CV_8UC4) || (image.type() == CV_8UC1)); CV_Assert((image.type() == CV_8UC3) || (image.type() == CV_8UC4) || (image.type() == CV_8UC1));
if (image.type() == CV_8UC3) { if (image.type() == CV_8UC3) {
cvtColor(image, gray_image, CV_BGR2GRAY); cvtColor(image, gray_image, COLOR_BGR2GRAY);
} else if (image.type() == CV_8UC4) { } else if (image.type() == CV_8UC4) {
cvtColor(image, gray_image, CV_BGRA2GRAY); cvtColor(image, gray_image, COLOR_BGRA2GRAY);
} else if (image.type() == CV_8UC1) { } else if (image.type() == CV_8UC1) {
gray_image=image; gray_image=image;
} else { } else {
@ -457,7 +457,7 @@ void SurfFeaturesFinderGpu::find(const Mat &image, ImageFeatures &features)
image_.upload(image); image_.upload(image);
ensureSizeIsEnough(image.size(), CV_8UC1, gray_image_); ensureSizeIsEnough(image.size(), CV_8UC1, gray_image_);
cvtColor(image_, gray_image_, CV_BGR2GRAY); cvtColor(image_, gray_image_, COLOR_BGR2GRAY);
surf_.nOctaves = num_octaves_; surf_.nOctaves = num_octaves_;
surf_.nOctaveLayers = num_layers_; surf_.nOctaveLayers = num_layers_;

View File

@ -139,8 +139,8 @@ void VoronoiSeamFinder::findInPair(size_t first, size_t second, Rect roi)
Mat unique2 = submask2.clone(); unique2.setTo(0, collision); Mat unique2 = submask2.clone(); unique2.setTo(0, collision);
Mat dist1, dist2; Mat dist1, dist2;
distanceTransform(unique1 == 0, dist1, CV_DIST_L1, 3); distanceTransform(unique1 == 0, dist1, DIST_L1, 3);
distanceTransform(unique2 == 0, dist2, CV_DIST_L1, 3); distanceTransform(unique2 == 0, dist2, DIST_L1, 3);
Mat seam = dist1 < dist2; Mat seam = dist1 < dist2;
@ -522,17 +522,17 @@ void DpSeamFinder::computeGradients(const Mat &image1, const Mat &image2)
Mat gray; Mat gray;
if (image1.channels() == 3) if (image1.channels() == 3)
cvtColor(image1, gray, CV_BGR2GRAY); cvtColor(image1, gray, COLOR_BGR2GRAY);
else if (image1.channels() == 4) else if (image1.channels() == 4)
cvtColor(image1, gray, CV_BGRA2GRAY); cvtColor(image1, gray, COLOR_BGRA2GRAY);
Sobel(gray, gradx1_, CV_32F, 1, 0); Sobel(gray, gradx1_, CV_32F, 1, 0);
Sobel(gray, grady1_, CV_32F, 0, 1); Sobel(gray, grady1_, CV_32F, 0, 1);
if (image2.channels() == 3) if (image2.channels() == 3)
cvtColor(image2, gray, CV_BGR2GRAY); cvtColor(image2, gray, COLOR_BGR2GRAY);
else if (image2.channels() == 4) else if (image2.channels() == 4)
cvtColor(image2, gray, CV_BGRA2GRAY); cvtColor(image2, gray, COLOR_BGRA2GRAY);
Sobel(gray, gradx2_, CV_32F, 1, 0); Sobel(gray, gradx2_, CV_32F, 1, 0);
Sobel(gray, grady2_, CV_32F, 0, 1); Sobel(gray, grady2_, CV_32F, 0, 1);

View File

@ -330,7 +330,7 @@ namespace cvtest
double checkSimilarity(InputArray m1, InputArray m2) double checkSimilarity(InputArray m1, InputArray m2)
{ {
Mat diff; Mat diff;
matchTemplate(getMat(m1), getMat(m2), diff, CV_TM_CCORR_NORMED); matchTemplate(getMat(m1), getMat(m2), diff, TM_CCORR_NORMED);
return std::abs(diff.at<float>(0, 0) - 1.f); return std::abs(diff.at<float>(0, 0) - 1.f);
} }

View File

@ -45,8 +45,10 @@
#ifdef __cplusplus #ifdef __cplusplus
# include "opencv2/core.hpp" # include "opencv2/core.hpp"
# include "opencv2/imgproc.hpp"
#endif #endif
#include "opencv2/imgproc.hpp"
#include "opencv2/imgproc/imgproc_c.h"
#ifdef __cplusplus #ifdef __cplusplus
extern "C" { extern "C" {

View File

@ -345,7 +345,7 @@ _exit_:
if( code < 0 ) if( code < 0 )
{ {
#if defined _DEBUG && defined WIN32 #if 0 //defined _DEBUG && defined WIN32
IplImage* dst = cvCreateImage( img_size, 8, 3 ); IplImage* dst = cvCreateImage( img_size, 8, 3 );
cvNamedWindow( "test", 1 ); cvNamedWindow( "test", 1 );
cvCmpS( img, 0, img, CV_CMP_GT ); cvCmpS( img, 0, img, CV_CMP_GT );
@ -484,7 +484,7 @@ _exit_:
if( code < 0 ) if( code < 0 )
{ {
#if defined _DEBUG && defined WIN32 #if 0// defined _DEBUG && defined WIN32
IplImage* dst = cvCreateImage( img_size, 8, 3 ); IplImage* dst = cvCreateImage( img_size, 8, 3 );
cvNamedWindow( "test", 1 ); cvNamedWindow( "test", 1 );
cvCmpS( img, 0, img, CV_CMP_GT ); cvCmpS( img, 0, img, CV_CMP_GT );

View File

@ -72,8 +72,9 @@ void CV_OptFlowPyrLKTest::run( int )
CvMat *_u = 0, *_v = 0, *_v2 = 0; CvMat *_u = 0, *_v = 0, *_v2 = 0;
char* status = 0; char* status = 0;
IplImage* imgI = 0; IplImage imgI;
IplImage* imgJ = 0; IplImage imgJ;
cv::Mat imgI2, imgJ2;
int n = 0, i = 0; int n = 0, i = 0;
@ -115,9 +116,10 @@ void CV_OptFlowPyrLKTest::run( int )
/* read first image */ /* read first image */
sprintf( filename, "%soptflow/%s", ts->get_data_path().c_str(), "rock_1.bmp" ); sprintf( filename, "%soptflow/%s", ts->get_data_path().c_str(), "rock_1.bmp" );
imgI = cvLoadImage( filename, -1 ); imgI2 = cv::imread( filename, cv::IMREAD_UNCHANGED );
imgI = imgI2;
if( !imgI ) if( imgI2.empty() )
{ {
ts->printf( cvtest::TS::LOG, "could not read %s\n", filename ); ts->printf( cvtest::TS::LOG, "could not read %s\n", filename );
code = cvtest::TS::FAIL_MISSING_TEST_DATA; code = cvtest::TS::FAIL_MISSING_TEST_DATA;
@ -126,9 +128,10 @@ void CV_OptFlowPyrLKTest::run( int )
/* read second image */ /* read second image */
sprintf( filename, "%soptflow/%s", ts->get_data_path().c_str(), "rock_2.bmp" ); sprintf( filename, "%soptflow/%s", ts->get_data_path().c_str(), "rock_2.bmp" );
imgJ = cvLoadImage( filename, -1 ); imgJ2 = cv::imread( filename, cv::IMREAD_UNCHANGED );
imgJ = imgJ2;
if( !imgJ ) if( imgJ2.empty() )
{ {
ts->printf( cvtest::TS::LOG, "could not read %s\n", filename ); ts->printf( cvtest::TS::LOG, "could not read %s\n", filename );
code = cvtest::TS::FAIL_MISSING_TEST_DATA; code = cvtest::TS::FAIL_MISSING_TEST_DATA;
@ -139,7 +142,7 @@ void CV_OptFlowPyrLKTest::run( int )
status = (char*)cvAlloc(n*sizeof(status[0])); status = (char*)cvAlloc(n*sizeof(status[0]));
/* calculate flow */ /* calculate flow */
cvCalcOpticalFlowPyrLK( imgI, imgJ, 0, 0, u, v2, n, cvSize( 41, 41 ), cvCalcOpticalFlowPyrLK( &imgI, &imgJ, 0, 0, u, v2, n, cvSize( 41, 41 ),
4, status, 0, cvTermCriteria( CV_TERMCRIT_ITER| 4, status, 0, cvTermCriteria( CV_TERMCRIT_ITER|
CV_TERMCRIT_EPS, 30, 0.01f ), 0 ); CV_TERMCRIT_EPS, 30, 0.01f ), 0 );
@ -201,9 +204,6 @@ _exit_:
cvReleaseMat( &_v ); cvReleaseMat( &_v );
cvReleaseMat( &_v2 ); cvReleaseMat( &_v2 );
cvReleaseImage( &imgI );
cvReleaseImage( &imgJ );
if( code < 0 ) if( code < 0 )
ts->set_failed_test_info( code ); ts->set_failed_test_info( code );
} }

View File

@ -84,10 +84,10 @@ public:
} }
#ifdef HAVE_OPENCV_HIGHGUI #ifdef HAVE_OPENCV_HIGHGUI
int width() {return static_cast<int>(vc.get(CV_CAP_PROP_FRAME_WIDTH));} int width() {return static_cast<int>(vc.get(CAP_PROP_FRAME_WIDTH));}
int height() {return static_cast<int>(vc.get(CV_CAP_PROP_FRAME_HEIGHT));} int height() {return static_cast<int>(vc.get(CAP_PROP_FRAME_HEIGHT));}
int count() {return static_cast<int>(vc.get(CV_CAP_PROP_FRAME_COUNT));} int count() {return static_cast<int>(vc.get(CAP_PROP_FRAME_COUNT));}
double fps() {return vc.get(CV_CAP_PROP_FPS);} double fps() {return vc.get(CAP_PROP_FPS);}
#else #else
int width() {return 0;} int width() {return 0;}
int height() {return 0;} int height() {return 0;}
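For reference, the renamed capture properties used above in a stand-alone sketch (the video file name is a placeholder):

    #include "opencv2/highgui.hpp"
    #include <cstdio>

    int main()
    {
        cv::VideoCapture vc("input.avi");   // placeholder video file
        if (!vc.isOpened()) return 1;
        int w      = static_cast<int>(vc.get(cv::CAP_PROP_FRAME_WIDTH));
        int h      = static_cast<int>(vc.get(cv::CAP_PROP_FRAME_HEIGHT));
        double fps = vc.get(cv::CAP_PROP_FPS);
        std::printf("%dx%d @ %g fps\n", w, h, fps);
        return 0;
    }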

View File

@ -11,9 +11,9 @@ int main(int argc, char* argv[])
printf("%s\n", message); printf("%s\n", message);
// put message to simple image // put message to simple image
Size textsize = getTextSize(message, CV_FONT_HERSHEY_COMPLEX, 3, 5, 0); Size textsize = getTextSize(message, FONT_HERSHEY_COMPLEX, 3, 5, 0);
Mat img(textsize.height + 20, textsize.width + 20, CV_32FC1, Scalar(230,230,230)); Mat img(textsize.height + 20, textsize.width + 20, CV_32FC1, Scalar(230,230,230));
putText(img, message, Point(10, img.rows - 10), CV_FONT_HERSHEY_COMPLEX, 3, Scalar(0, 0, 0), 5); putText(img, message, Point(10, img.rows - 10), FONT_HERSHEY_COMPLEX, 3, Scalar(0, 0, 0), 5);
// save\show resulting image // save\show resulting image
#if ANDROID #if ANDROID

View File

@ -40,7 +40,7 @@
#include <cstring> #include <cstring>
#include <ctime> #include <ctime>
#include "opencv2/contrib/contrib.hpp" #include "opencv2/contrib/contrib.hpp"
#include "opencv2/highgui/highgui.hpp" #include "opencv2/highgui/highgui_c.h"
static void help(char **argv) static void help(char **argv)
{ {

View File

@ -24,7 +24,7 @@
#include "opencv2/core/utility.hpp" #include "opencv2/core/utility.hpp"
#include "opencv2/video/background_segm.hpp" #include "opencv2/video/background_segm.hpp"
#include "opencv2/imgproc/imgproc_c.h" #include "opencv2/imgproc/imgproc_c.h"
#include "opencv2/highgui.hpp" #include "opencv2/highgui/highgui_c.h"
#include "opencv2/legacy.hpp" #include "opencv2/legacy.hpp"
#include <stdio.h> #include <stdio.h>

View File

@ -1,7 +1,7 @@
#include "opencv2/video/background_segm.hpp" #include "opencv2/video/background_segm.hpp"
#include "opencv2/legacy/blobtrack.hpp" #include "opencv2/legacy/blobtrack.hpp"
#include "opencv2/legacy/legacy.hpp" #include "opencv2/legacy/legacy.hpp"
#include "opencv2/highgui/highgui.hpp" #include <opencv2/highgui/highgui_c.h>
#include <opencv2/imgproc/imgproc_c.h> #include <opencv2/imgproc/imgproc_c.h>
#include <stdio.h> #include <stdio.h>

View File

@ -1,5 +1,5 @@
#include "opencv2/objdetect/objdetect.hpp" #include "opencv2/objdetect/objdetect.hpp"
#include "opencv2/highgui/highgui.hpp" #include "opencv2/highgui/highgui_c.h"
#include <ctype.h> #include <ctype.h>
#include <stdio.h> #include <stdio.h>

View File

@ -1,6 +1,6 @@
#include <opencv2/imgproc/imgproc_c.h> #include <opencv2/imgproc/imgproc_c.h>
#include <opencv2/legacy/legacy.hpp> #include <opencv2/highgui/highgui_c.h>
#include "opencv2/highgui/highgui.hpp" #include <opencv2/legacy.hpp>
#include <stdio.h> #include <stdio.h>
static void help( void ) static void help( void )

View File

@ -1,8 +1,10 @@
#include "opencv2/objdetect/objdetect.hpp" #include "opencv2/objdetect.hpp"
#include "opencv2/highgui/highgui.hpp" #include "opencv2/highgui.hpp"
#include "opencv2/imgproc/imgproc.hpp" #include "opencv2/imgproc.hpp"
#include "opencv2/core/utility.hpp" #include "opencv2/core/utility.hpp"
#include "opencv2/highgui/highgui_c.h"
#include <cctype> #include <cctype>
#include <iostream> #include <iostream>
#include <iterator> #include <iterator>
@ -207,7 +209,7 @@ void detectAndDraw( Mat& img, CascadeClassifier& cascade,
CV_RGB(255,0,255)} ; CV_RGB(255,0,255)} ;
Mat gray, smallImg( cvRound (img.rows/scale), cvRound(img.cols/scale), CV_8UC1 ); Mat gray, smallImg( cvRound (img.rows/scale), cvRound(img.cols/scale), CV_8UC1 );
cvtColor( img, gray, CV_BGR2GRAY ); cvtColor( img, gray, COLOR_BGR2GRAY );
resize( gray, smallImg, smallImg.size(), 0, 0, INTER_LINEAR ); resize( gray, smallImg, smallImg.size(), 0, 0, INTER_LINEAR );
equalizeHist( smallImg, smallImg ); equalizeHist( smallImg, smallImg );

View File

@ -1,5 +1,5 @@
#include "opencv2/video/tracking.hpp" #include "opencv2/video/tracking.hpp"
#include "opencv2/highgui/highgui.hpp" #include "opencv2/highgui/highgui_c.h"
#include "opencv2/imgproc/imgproc_c.h" #include "opencv2/imgproc/imgproc_c.h"
#include <stdio.h> #include <stdio.h>

View File

@ -6,10 +6,10 @@
*/ */
#include "opencv2/objdetect/objdetect.hpp" #include "opencv2/objdetect/objdetect.hpp"
#include "opencv2/features2d/features2d.hpp" #include "opencv2/features2d/features2d.hpp"
#include "opencv2/highgui/highgui.hpp"
#include "opencv2/calib3d/calib3d.hpp" #include "opencv2/calib3d/calib3d.hpp"
#include "opencv2/nonfree/nonfree.hpp" #include "opencv2/nonfree/nonfree.hpp"
#include "opencv2/imgproc/imgproc_c.h" #include "opencv2/imgproc/imgproc_c.h"
#include "opencv2/highgui/highgui_c.h"
#include "opencv2/legacy/legacy.hpp" #include "opencv2/legacy/legacy.hpp"
#include "opencv2/legacy/compat.hpp" #include "opencv2/legacy/compat.hpp"

View File

@ -61,7 +61,7 @@ static void trainCalonderClassifier( const string& classifierFilename, const str
string str; string str;
getline( is, str ); getline( is, str );
if (str.empty()) break; if (str.empty()) break;
Mat img = imread( str, CV_LOAD_IMAGE_GRAYSCALE ); Mat img = imread( str, IMREAD_GRAYSCALE );
if( !img.empty() ) if( !img.empty() )
trainImgs.push_back( img ); trainImgs.push_back( img );
} }
@ -106,7 +106,7 @@ static void trainCalonderClassifier( const string& classifierFilename, const str
*/ */
static void testCalonderClassifier( const string& classifierFilename, const string& imgFilename ) static void testCalonderClassifier( const string& classifierFilename, const string& imgFilename )
{ {
Mat img1 = imread( imgFilename, CV_LOAD_IMAGE_GRAYSCALE ), img2, H12; Mat img1 = imread( imgFilename, IMREAD_GRAYSCALE ), img2, H12;
if( img1.empty() ) if( img1.empty() )
{ {
cout << "Test image can not be read." << endl; cout << "Test image can not be read." << endl;

View File

@ -32,8 +32,8 @@ int main(int argc, char** argv)
help(); help();
Mat object = imread( object_filename, CV_LOAD_IMAGE_GRAYSCALE ); Mat object = imread( object_filename, IMREAD_GRAYSCALE );
Mat scene = imread( scene_filename, CV_LOAD_IMAGE_GRAYSCALE ); Mat scene = imread( scene_filename, IMREAD_GRAYSCALE );
if( !object.data || !scene.data ) if( !object.data || !scene.data )
{ {
@ -47,9 +47,9 @@ int main(int argc, char** argv)
resize(scene, image, Size(), 1./imgscale, 1./imgscale, INTER_CUBIC); resize(scene, image, Size(), 1./imgscale, 1./imgscale, INTER_CUBIC);
cvNamedWindow("Object", 1); namedWindow("Object", 1);
cvNamedWindow("Image", 1); namedWindow("Image", 1);
cvNamedWindow("Object Correspondence", 1); namedWindow("Object Correspondence", 1);
Size patchSize(32, 32); Size patchSize(32, 32);
LDetector ldetector(7, 20, 2, 2000, patchSize.width, 2); LDetector ldetector(7, 20, 2, 2000, patchSize.width, 2);

View File

@ -1,5 +1,5 @@
#include "opencv2/objdetect/objdetect.hpp" #include "opencv2/objdetect.hpp"
#include "opencv2/highgui/highgui.hpp" #include "opencv2/highgui/highgui_c.h"
#include <stdio.h> #include <stdio.h>
#ifdef HAVE_CVCONFIG_H #ifdef HAVE_CVCONFIG_H

Some files were not shown because too many files have changed in this diff.