Mirror of https://github.com/opencv/opencv.git (synced 2024-11-29 05:29:54 +08:00)

Merge pull request #1925 from vpisarev:ocl_facedetect2

Commit b16f0a25af
@@ -221,44 +221,6 @@ The function is parallelized with the TBB library.

* (Python) A face detection example using cascade classifiers can be found at opencv_source_code/samples/python2/facedetect.py


CascadeClassifier::setImage
-------------------------------
Sets an image for detection.

.. ocv:function:: bool CascadeClassifier::setImage( Ptr<FeatureEvaluator>& feval, const Mat& image )

.. ocv:cfunction:: void cvSetImagesForHaarClassifierCascade( CvHaarClassifierCascade* cascade, const CvArr* sum, const CvArr* sqsum, const CvArr* tilted_sum, double scale )

:param cascade: Haar classifier cascade (OpenCV 1.x API only). See :ocv:func:`CascadeClassifier::detectMultiScale` for more information.

:param feval: Pointer to the feature evaluator used for computing features.

:param image: Matrix of the type ``CV_8UC1`` containing an image where the features are computed.

The function is automatically called by :ocv:func:`CascadeClassifier::detectMultiScale` at every image scale. But if you want to test various locations manually using :ocv:func:`CascadeClassifier::runAt`, you need to call the function before, so that the integral images are computed.

.. note:: in the old API you need to supply integral images (that can be obtained using :ocv:cfunc:`Integral`) instead of the original image.


CascadeClassifier::runAt
----------------------------
Runs the detector at the specified point.

.. ocv:function:: int CascadeClassifier::runAt( Ptr<FeatureEvaluator>& feval, Point pt, double& weight )

.. ocv:cfunction:: int cvRunHaarClassifierCascade( const CvHaarClassifierCascade* cascade, CvPoint pt, int start_stage=0 )

:param cascade: Haar classifier cascade (OpenCV 1.x API only). See :ocv:func:`CascadeClassifier::detectMultiScale` for more information.

:param feval: Feature evaluator used for computing features.

:param pt: Upper left point of the window where the features are computed. Size of the window is equal to the size of training images.

The function returns 1 if the cascade classifier detects an object in the given location.
Otherwise, it returns negated index of the stage at which the candidate has been rejected.

Use :ocv:func:`CascadeClassifier::setImage` to set the image for the detector to work with.
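For orientation (not part of this commit), a minimal sketch of the higher-level detectMultiScale workflow that the removed documentation above refers to; the cascade file and image names are assumptions:

    #include "opencv2/opencv.hpp"

    int main()
    {
        // Assumed cascade file and input image; adjust paths as needed.
        cv::CascadeClassifier face_cascade;
        if( !face_cascade.load("haarcascade_frontalface_alt.xml") )
            return -1;

        cv::Mat img = cv::imread("lena.jpg");
        if( img.empty() )
            return -1;

        cv::Mat gray;
        cv::cvtColor(img, gray, cv::COLOR_BGR2GRAY);
        cv::equalizeHist(gray, gray);

        // The integral images are computed internally at each scale; no manual
        // setImage()/runAt() calls are needed.
        std::vector<cv::Rect> faces;
        face_cascade.detectMultiScale(gray, faces, 1.1, 3, 0, cv::Size(30, 30));
        return (int)faces.size();
    }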
groupRectangles
-------------------
Groups the object candidate rectangles.
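A short usage sketch of groupRectangles (not part of this commit; the threshold and eps values are illustrative):

    #include "opencv2/objdetect.hpp"
    #include <vector>

    // Keep clusters of at least 3 overlapping candidates, merging rectangles
    // whose sides differ by at most 20%.
    void mergeCandidates(std::vector<cv::Rect>& candidates)
    {
        cv::groupRectangles(candidates, 3, 0.2);
    }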
@@ -149,143 +149,97 @@ enum { CASCADE_DO_CANNY_PRUNING = 1,
CASCADE_DO_ROUGH_SEARCH = 8
};

class CV_EXPORTS_W CascadeClassifier
class CV_EXPORTS_W BaseCascadeClassifier : public Algorithm
{
public:
CV_WRAP CascadeClassifier();
CV_WRAP CascadeClassifier( const String& filename );
virtual ~CascadeClassifier();
virtual ~BaseCascadeClassifier();
virtual bool empty() const = 0;
virtual bool load( const String& filename ) = 0;
virtual void detectMultiScale( InputArray image,
CV_OUT std::vector<Rect>& objects,
double scaleFactor,
int minNeighbors, int flags,
Size minSize, Size maxSize ) = 0;

CV_WRAP virtual bool empty() const;
CV_WRAP bool load( const String& filename );
virtual bool read( const FileNode& node );
CV_WRAP virtual void detectMultiScale( InputArray image,
CV_OUT std::vector<Rect>& objects,
double scaleFactor = 1.1,
int minNeighbors = 3, int flags = 0,
Size minSize = Size(),
Size maxSize = Size() );
virtual void detectMultiScale( InputArray image,
CV_OUT std::vector<Rect>& objects,
CV_OUT std::vector<int>& numDetections,
double scaleFactor,
int minNeighbors, int flags,
Size minSize, Size maxSize ) = 0;

CV_WRAP virtual void detectMultiScale( InputArray image,
CV_OUT std::vector<Rect>& objects,
CV_OUT std::vector<int>& numDetections,
double scaleFactor=1.1,
int minNeighbors=3, int flags=0,
Size minSize=Size(),
Size maxSize=Size() );

CV_WRAP virtual void detectMultiScale( InputArray image,
virtual void detectMultiScale( InputArray image,
CV_OUT std::vector<Rect>& objects,
CV_OUT std::vector<int>& rejectLevels,
CV_OUT std::vector<double>& levelWeights,
double scaleFactor = 1.1,
int minNeighbors = 3, int flags = 0,
Size minSize = Size(),
Size maxSize = Size(),
bool outputRejectLevels = false );
double scaleFactor,
int minNeighbors, int flags,
Size minSize, Size maxSize,
bool outputRejectLevels ) = 0;

virtual bool isOldFormatCascade() const = 0;
virtual Size getOriginalWindowSize() const = 0;
virtual int getFeatureType() const = 0;
virtual void* getOldCascade() = 0;

bool isOldFormatCascade() const;
virtual Size getOriginalWindowSize() const;
int getFeatureType() const;
bool setImage( const Mat& );

protected:
virtual bool detectSingleScale( const Mat& image, int stripCount, Size processingRectSize,
int stripSize, int yStep, double factor, std::vector<Rect>& candidates,
std::vector<int>& rejectLevels, std::vector<double>& levelWeights, bool outputRejectLevels = false );

virtual void detectMultiScaleNoGrouping( const Mat& image, std::vector<Rect>& candidates,
std::vector<int>& rejectLevels, std::vector<double>& levelWeights,
double scaleFactor, Size minObjectSize, Size maxObjectSize,
bool outputRejectLevels = false );

protected:
enum { BOOST = 0
};
enum { DO_CANNY_PRUNING = CASCADE_DO_CANNY_PRUNING,
SCALE_IMAGE = CASCADE_SCALE_IMAGE,
FIND_BIGGEST_OBJECT = CASCADE_FIND_BIGGEST_OBJECT,
DO_ROUGH_SEARCH = CASCADE_DO_ROUGH_SEARCH
};

friend class CascadeClassifierInvoker;

template<class FEval>
friend int predictOrdered( CascadeClassifier& cascade, Ptr<FeatureEvaluator> &featureEvaluator, double& weight);

template<class FEval>
friend int predictCategorical( CascadeClassifier& cascade, Ptr<FeatureEvaluator> &featureEvaluator, double& weight);

template<class FEval>
friend int predictOrderedStump( CascadeClassifier& cascade, Ptr<FeatureEvaluator> &featureEvaluator, double& weight);

template<class FEval>
friend int predictCategoricalStump( CascadeClassifier& cascade, Ptr<FeatureEvaluator> &featureEvaluator, double& weight);

bool setImage( Ptr<FeatureEvaluator>& feval, const Mat& image);
virtual int runAt( Ptr<FeatureEvaluator>& feval, Point pt, double& weight );

class Data
{
public:
struct CV_EXPORTS DTreeNode
{
int featureIdx;
float threshold; // for ordered features only
int left;
int right;
};

struct CV_EXPORTS DTree
{
int nodeCount;
};

struct CV_EXPORTS Stage
{
int first;
int ntrees;
float threshold;
};

bool read(const FileNode &node);

bool isStumpBased;

int stageType;
int featureType;
int ncategories;
Size origWinSize;

std::vector<Stage> stages;
std::vector<DTree> classifiers;
std::vector<DTreeNode> nodes;
std::vector<float> leaves;
std::vector<int> subsets;
};

Data data;
Ptr<FeatureEvaluator> featureEvaluator;
Ptr<CvHaarClassifierCascade> oldCascade;

public:
class CV_EXPORTS MaskGenerator
{
public:
virtual ~MaskGenerator() {}
virtual cv::Mat generateMask(const cv::Mat& src)=0;
virtual void initializeMask(const cv::Mat& /*src*/) {};
virtual Mat generateMask(const Mat& src)=0;
virtual void initializeMask(const Mat& /*src*/) {};
};
void setMaskGenerator(Ptr<MaskGenerator> maskGenerator);
Ptr<MaskGenerator> getMaskGenerator();

void setFaceDetectionMaskGenerator();

protected:
Ptr<MaskGenerator> maskGenerator;
virtual void setMaskGenerator(const Ptr<MaskGenerator>& maskGenerator) = 0;
virtual Ptr<MaskGenerator> getMaskGenerator() = 0;
};

class CV_EXPORTS_W CascadeClassifier
{
public:
CV_WRAP CascadeClassifier();
CV_WRAP CascadeClassifier(const String& filename);
~CascadeClassifier();
CV_WRAP bool empty() const;
CV_WRAP bool load( const String& filename );
CV_WRAP bool read( const FileNode& node );
CV_WRAP void detectMultiScale( InputArray image,
CV_OUT std::vector<Rect>& objects,
double scaleFactor = 1.1,
int minNeighbors = 3, int flags = 0,
Size minSize = Size(),
Size maxSize = Size() );

CV_WRAP void detectMultiScale( InputArray image,
CV_OUT std::vector<Rect>& objects,
CV_OUT std::vector<int>& numDetections,
double scaleFactor=1.1,
int minNeighbors=3, int flags=0,
Size minSize=Size(),
Size maxSize=Size() );

CV_WRAP void detectMultiScale( InputArray image,
CV_OUT std::vector<Rect>& objects,
CV_OUT std::vector<int>& rejectLevels,
CV_OUT std::vector<double>& levelWeights,
double scaleFactor = 1.1,
int minNeighbors = 3, int flags = 0,
Size minSize = Size(),
Size maxSize = Size(),
bool outputRejectLevels = false );

CV_WRAP bool isOldFormatCascade() const;
CV_WRAP Size getOriginalWindowSize() const;
CV_WRAP int getFeatureType() const;
void* getOldCascade();

void setMaskGenerator(const Ptr<BaseCascadeClassifier::MaskGenerator>& maskGenerator);
Ptr<BaseCascadeClassifier::MaskGenerator> getMaskGenerator();

Ptr<BaseCascadeClassifier> cc;
};

CV_EXPORTS Ptr<BaseCascadeClassifier::MaskGenerator> createFaceDetectionMaskGenerator();

//////////////// HOG (Histogram-of-Oriented-Gradients) Descriptor and Object Detector //////////////

// struct for detection region of interest (ROI)
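For context (not part of the patch), a minimal sketch of how the refactored public CascadeClassifier facade declared above might be used; the cascade file and image names are assumptions:

    #include "opencv2/opencv.hpp"

    int main()
    {
        // Assumed cascade file and test image; adjust paths as needed.
        cv::CascadeClassifier detector("haarcascade_frontalface_alt.xml");
        cv::Mat gray = cv::imread("people.png", cv::IMREAD_GRAYSCALE);
        if( detector.empty() || gray.empty() )
            return -1;

        // Optional: plug in the face-oriented mask generator exposed by this patch.
        detector.setMaskGenerator(cv::createFaceDetectionMaskGenerator());

        std::vector<cv::Rect> objects;
        std::vector<int> numDetections;   // per-object neighbour counts
        detector.detectMultiScale(gray, objects, numDetections, 1.1, 3, 0, cv::Size(30, 30));
        return (int)objects.size();
    }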
@@ -430,7 +430,6 @@ void groupRectangles_meanshift(std::vector<Rect>& rectList, std::vector<double>&
}

FeatureEvaluator::~FeatureEvaluator() {}
bool FeatureEvaluator::read(const FileNode&) {return true;}
Ptr<FeatureEvaluator> FeatureEvaluator::clone() const { return Ptr<FeatureEvaluator>(); }

@@ -834,25 +833,20 @@ Ptr<FeatureEvaluator> FeatureEvaluator::create( int featureType )

//---------------------------------------- Classifier Cascade --------------------------------------------

CascadeClassifier::CascadeClassifier()
CascadeClassifierImpl::CascadeClassifierImpl()
{
}

CascadeClassifier::CascadeClassifier(const String& filename)
{
load(filename);
}

CascadeClassifier::~CascadeClassifier()
CascadeClassifierImpl::~CascadeClassifierImpl()
{
}

bool CascadeClassifier::empty() const
bool CascadeClassifierImpl::empty() const
{
return !oldCascade && data.stages.empty();
}

bool CascadeClassifier::load(const String& filename)
bool CascadeClassifierImpl::load(const String& filename)
{
oldCascade.release();
data = Data();

@@ -862,7 +856,7 @@ bool CascadeClassifier::load(const String& filename)
if( !fs.isOpened() )
return false;

if( read(fs.getFirstTopLevelNode()) )
if( read_(fs.getFirstTopLevelNode()) )
return true;

fs.release();

@@ -871,7 +865,12 @@ bool CascadeClassifier::load(const String& filename)
return !oldCascade.empty();
}

int CascadeClassifier::runAt( Ptr<FeatureEvaluator>& evaluator, Point pt, double& weight )
void CascadeClassifierImpl::read(const FileNode& node)
{
read_(node);
}

int CascadeClassifierImpl::runAt( Ptr<FeatureEvaluator>& evaluator, Point pt, double& weight )
{
CV_Assert( !oldCascade );

@@ -905,33 +904,33 @@ int CascadeClassifier::runAt( Ptr<FeatureEvaluator>& evaluator, Point pt, double
}
}

bool CascadeClassifier::setImage( Ptr<FeatureEvaluator>& evaluator, const Mat& image )
bool CascadeClassifierImpl::setImage( Ptr<FeatureEvaluator>& evaluator, const Mat& image )
{
return empty() ? false : evaluator->setImage(image, data.origWinSize);
}

void CascadeClassifier::setMaskGenerator(Ptr<MaskGenerator> _maskGenerator)
void CascadeClassifierImpl::setMaskGenerator(const Ptr<MaskGenerator>& _maskGenerator)
{
maskGenerator=_maskGenerator;
}
Ptr<CascadeClassifier::MaskGenerator> CascadeClassifier::getMaskGenerator()
Ptr<CascadeClassifierImpl::MaskGenerator> CascadeClassifierImpl::getMaskGenerator()
{
return maskGenerator;
}

void CascadeClassifier::setFaceDetectionMaskGenerator()
Ptr<BaseCascadeClassifier::MaskGenerator> createFaceDetectionMaskGenerator()
{
#ifdef HAVE_TEGRA_OPTIMIZATION
setMaskGenerator(tegra::getCascadeClassifierMaskGenerator(*this));
return tegra::getCascadeClassifierMaskGenerator(*this);
#else
setMaskGenerator(Ptr<CascadeClassifier::MaskGenerator>());
return Ptr<BaseCascadeClassifier::MaskGenerator>();
#endif
}
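As an aside (not part of this commit), a hedged sketch of a user-defined mask generator; the class name and masking policy below are hypothetical, only the generateMask/initializeMask interface and the setMaskGenerator call come from the patch:

    #include "opencv2/objdetect.hpp"

    // Hypothetical mask generator restricting detection to the upper half of the frame.
    class UpperHalfMaskGenerator : public cv::BaseCascadeClassifier::MaskGenerator
    {
    public:
        cv::Mat generateMask(const cv::Mat& src)
        {
            cv::Mat mask = cv::Mat::zeros(src.size(), CV_8UC1);
            mask(cv::Rect(0, 0, src.cols, src.rows / 2)).setTo(cv::Scalar::all(255));
            return mask;
        }
        void initializeMask(const cv::Mat& /*src*/) {}
    };

    void useUpperHalfMask(cv::CascadeClassifier& detector)   // hypothetical helper
    {
        detector.setMaskGenerator(cv::makePtr<UpperHalfMaskGenerator>());
    }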
class CascadeClassifierInvoker : public ParallelLoopBody
{
public:
CascadeClassifierInvoker( CascadeClassifier& _cc, Size _sz1, int _stripSize, int _yStep, double _factor,
CascadeClassifierInvoker( CascadeClassifierImpl& _cc, Size _sz1, int _stripSize, int _yStep, double _factor,
std::vector<Rect>& _vec, std::vector<int>& _levels, std::vector<double>& _weights, bool outputLevels, const Mat& _mask, Mutex* _mtx)
{
classifier = &_cc;

@@ -950,7 +949,8 @@ public:
{
Ptr<FeatureEvaluator> evaluator = classifier->featureEvaluator->clone();

Size winSize(cvRound(classifier->data.origWinSize.width * scalingFactor), cvRound(classifier->data.origWinSize.height * scalingFactor));
Size winSize(cvRound(classifier->data.origWinSize.width * scalingFactor),
cvRound(classifier->data.origWinSize.height * scalingFactor));

int y1 = range.start * stripSize;
int y2 = std::min(range.end * stripSize, processingRectSize.height);

@@ -995,7 +995,7 @@ public:
}
}

CascadeClassifier* classifier;
CascadeClassifierImpl* classifier;
std::vector<Rect>* rectangles;
Size processingRectSize;
int stripSize, yStep;

@@ -1010,7 +1010,7 @@ struct getRect { Rect operator ()(const CvAvgComp& e) const { return e.rect; } }
struct getNeighbors { int operator ()(const CvAvgComp& e) const { return e.neighbors; } };

bool CascadeClassifier::detectSingleScale( const Mat& image, int stripCount, Size processingRectSize,
bool CascadeClassifierImpl::detectSingleScale( const Mat& image, int stripCount, Size processingRectSize,
int stripSize, int yStep, double factor, std::vector<Rect>& candidates,
std::vector<int>& levels, std::vector<double>& weights, bool outputRejectLevels )
{

@@ -1051,27 +1051,33 @@ bool CascadeClassifier::detectSingleScale( const Mat& image, int stripCount, Siz
return true;
}

bool CascadeClassifier::isOldFormatCascade() const
bool CascadeClassifierImpl::isOldFormatCascade() const
{
return !oldCascade.empty();
}

int CascadeClassifier::getFeatureType() const
int CascadeClassifierImpl::getFeatureType() const
{
return featureEvaluator->getFeatureType();
}

Size CascadeClassifier::getOriginalWindowSize() const
Size CascadeClassifierImpl::getOriginalWindowSize() const
{
return data.origWinSize;
}

bool CascadeClassifier::setImage(const Mat& image)
bool CascadeClassifierImpl::setImage(InputArray _image)
{
Mat image = _image.getMat();
return featureEvaluator->setImage(image, data.origWinSize);
}

void* CascadeClassifierImpl::getOldCascade()
{
return oldCascade;
}

static void detectMultiScaleOldFormat( const Mat& image, Ptr<CvHaarClassifierCascade> oldCascade,
std::vector<Rect>& objects,
std::vector<int>& rejectLevels,

@@ -1090,7 +1096,7 @@ static void detectMultiScaleOldFormat( const Mat& image, Ptr<CvHaarClassifierCas
std::transform(vecAvgComp.begin(), vecAvgComp.end(), objects.begin(), getRect());
}

void CascadeClassifier::detectMultiScaleNoGrouping( const Mat& image, std::vector<Rect>& candidates,
void CascadeClassifierImpl::detectMultiScaleNoGrouping( const Mat& image, std::vector<Rect>& candidates,
std::vector<int>& rejectLevels, std::vector<double>& levelWeights,
double scaleFactor, Size minObjectSize, Size maxObjectSize,
bool outputRejectLevels )

@@ -1154,7 +1160,7 @@ void CascadeClassifier::detectMultiScaleNoGrouping( const Mat& image, std::vecto
}
}

void CascadeClassifier::detectMultiScale( InputArray _image, std::vector<Rect>& objects,
void CascadeClassifierImpl::detectMultiScale( InputArray _image, std::vector<Rect>& objects,
std::vector<int>& rejectLevels,
std::vector<double>& levelWeights,
double scaleFactor, int minNeighbors,

@@ -1189,7 +1195,7 @@ void CascadeClassifier::detectMultiScale( InputArray _image, std::vector<Rect>&
}
}

void CascadeClassifier::detectMultiScale( InputArray _image, std::vector<Rect>& objects,
void CascadeClassifierImpl::detectMultiScale( InputArray _image, std::vector<Rect>& objects,
double scaleFactor, int minNeighbors,
int flags, Size minObjectSize, Size maxObjectSize)
{

@@ -1200,7 +1206,7 @@ void CascadeClassifier::detectMultiScale( InputArray _image, std::vector<Rect>&
minNeighbors, flags, minObjectSize, maxObjectSize );
}

void CascadeClassifier::detectMultiScale( InputArray _image, std::vector<Rect>& objects,
void CascadeClassifierImpl::detectMultiScale( InputArray _image, std::vector<Rect>& objects,
std::vector<int>& numDetections, double scaleFactor,
int minNeighbors, int flags, Size minObjectSize,
Size maxObjectSize )

@@ -1229,7 +1235,7 @@ void CascadeClassifier::detectMultiScale( InputArray _image, std::vector<Rect>&
}
}

bool CascadeClassifier::Data::read(const FileNode &root)
bool CascadeClassifierImpl::Data::read(const FileNode &root)
{
static const float THRESHOLD_EPS = 1e-5f;

@@ -1339,7 +1345,7 @@ bool CascadeClassifier::Data::read(const FileNode &root)
return true;
}

bool CascadeClassifier::read(const FileNode& root)
bool CascadeClassifierImpl::read_(const FileNode& root)
{
if( !data.read(root) )
return false;

@@ -1356,4 +1362,117 @@ bool CascadeClassifier::read(const FileNode& root)
template<> void DefaultDeleter<CvHaarClassifierCascade>::operator ()(CvHaarClassifierCascade* obj) const
{ cvReleaseHaarClassifierCascade(&obj); }

BaseCascadeClassifier::~BaseCascadeClassifier()
{
}

CascadeClassifier::CascadeClassifier() {}
CascadeClassifier::CascadeClassifier(const String& filename)
{
load(filename);
}

CascadeClassifier::~CascadeClassifier()
{
}

bool CascadeClassifier::empty() const
{
return cc.empty() || cc->empty();
}

bool CascadeClassifier::load( const String& filename )
{
cc = makePtr<CascadeClassifierImpl>();
if(!cc->load(filename))
cc.release();
return !empty();
}

bool CascadeClassifier::read(const FileNode &root)
{
Ptr<CascadeClassifierImpl> ccimpl = makePtr<CascadeClassifierImpl>();
bool ok = ccimpl->read_(root);
if( ok )
cc = ccimpl.staticCast<BaseCascadeClassifier>();
else
cc.release();
return ok;
}

void CascadeClassifier::detectMultiScale( InputArray image,
CV_OUT std::vector<Rect>& objects,
double scaleFactor,
int minNeighbors, int flags,
Size minSize,
Size maxSize )
{
CV_Assert(!empty());
cc->detectMultiScale(image, objects, scaleFactor, minNeighbors, flags, minSize, maxSize);
}

void CascadeClassifier::detectMultiScale( InputArray image,
CV_OUT std::vector<Rect>& objects,
CV_OUT std::vector<int>& numDetections,
double scaleFactor,
int minNeighbors, int flags,
Size minSize, Size maxSize )
{
CV_Assert(!empty());
cc->detectMultiScale(image, objects, numDetections,
scaleFactor, minNeighbors, flags, minSize, maxSize);
}

void CascadeClassifier::detectMultiScale( InputArray image,
CV_OUT std::vector<Rect>& objects,
CV_OUT std::vector<int>& rejectLevels,
CV_OUT std::vector<double>& levelWeights,
double scaleFactor,
int minNeighbors, int flags,
Size minSize, Size maxSize,
bool outputRejectLevels )
{
CV_Assert(!empty());
cc->detectMultiScale(image, objects, rejectLevels, levelWeights,
scaleFactor, minNeighbors, flags,
minSize, maxSize, outputRejectLevels);
}
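A hedged sketch (not part of this commit) of calling the outputRejectLevels overload wrapped above; the helper name is hypothetical:

    #include "opencv2/objdetect.hpp"

    // Collect candidate boxes together with the stage level reached and the
    // accumulated stage sum (often used as a rough confidence value).
    void detectWithLevels(cv::CascadeClassifier& cascade, const cv::Mat& gray,
                          std::vector<cv::Rect>& objects,
                          std::vector<int>& rejectLevels,
                          std::vector<double>& levelWeights)
    {
        cascade.detectMultiScale(gray, objects, rejectLevels, levelWeights,
                                 1.1, 3, 0, cv::Size(), cv::Size(),
                                 true /* outputRejectLevels */);
    }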
bool CascadeClassifier::isOldFormatCascade() const
{
CV_Assert(!empty());
return cc->isOldFormatCascade();
}

Size CascadeClassifier::getOriginalWindowSize() const
{
CV_Assert(!empty());
return cc->getOriginalWindowSize();
}

int CascadeClassifier::getFeatureType() const
{
CV_Assert(!empty());
return cc->getFeatureType();
}

void* CascadeClassifier::getOldCascade()
{
CV_Assert(!empty());
return cc->getOldCascade();
}

void CascadeClassifier::setMaskGenerator(const Ptr<BaseCascadeClassifier::MaskGenerator>& maskGenerator)
{
CV_Assert(!empty());
cc->setMaskGenerator(maskGenerator);
}

Ptr<BaseCascadeClassifier::MaskGenerator> CascadeClassifier::getMaskGenerator()
{
CV_Assert(!empty());
return cc->getMaskGenerator();
}

} // namespace cv
@@ -3,6 +3,132 @@

namespace cv
{

class CascadeClassifierImpl : public BaseCascadeClassifier
{
public:
CascadeClassifierImpl();
virtual ~CascadeClassifierImpl();

bool empty() const;
bool load( const String& filename );
void read( const FileNode& node );
bool read_( const FileNode& node );
void detectMultiScale( InputArray image,
CV_OUT std::vector<Rect>& objects,
double scaleFactor = 1.1,
int minNeighbors = 3, int flags = 0,
Size minSize = Size(),
Size maxSize = Size() );

void detectMultiScale( InputArray image,
CV_OUT std::vector<Rect>& objects,
CV_OUT std::vector<int>& numDetections,
double scaleFactor=1.1,
int minNeighbors=3, int flags=0,
Size minSize=Size(),
Size maxSize=Size() );

void detectMultiScale( InputArray image,
CV_OUT std::vector<Rect>& objects,
CV_OUT std::vector<int>& rejectLevels,
CV_OUT std::vector<double>& levelWeights,
double scaleFactor = 1.1,
int minNeighbors = 3, int flags = 0,
Size minSize = Size(),
Size maxSize = Size(),
bool outputRejectLevels = false );

bool isOldFormatCascade() const;
Size getOriginalWindowSize() const;
int getFeatureType() const;
bool setImage( InputArray );
void* getOldCascade();

void setMaskGenerator(const Ptr<MaskGenerator>& maskGenerator);
Ptr<MaskGenerator> getMaskGenerator();

protected:
bool detectSingleScale( const Mat& image, int stripCount, Size processingRectSize,
int stripSize, int yStep, double factor, std::vector<Rect>& candidates,
std::vector<int>& rejectLevels, std::vector<double>& levelWeights, bool outputRejectLevels = false );

void detectMultiScaleNoGrouping( const Mat& image, std::vector<Rect>& candidates,
std::vector<int>& rejectLevels, std::vector<double>& levelWeights,
double scaleFactor, Size minObjectSize, Size maxObjectSize,
bool outputRejectLevels = false );

enum { BOOST = 0
};
enum { DO_CANNY_PRUNING = CASCADE_DO_CANNY_PRUNING,
SCALE_IMAGE = CASCADE_SCALE_IMAGE,
FIND_BIGGEST_OBJECT = CASCADE_FIND_BIGGEST_OBJECT,
DO_ROUGH_SEARCH = CASCADE_DO_ROUGH_SEARCH
};

friend class CascadeClassifierInvoker;

template<class FEval>
friend int predictOrdered( CascadeClassifierImpl& cascade, Ptr<FeatureEvaluator> &featureEvaluator, double& weight);

template<class FEval>
friend int predictCategorical( CascadeClassifierImpl& cascade, Ptr<FeatureEvaluator> &featureEvaluator, double& weight);

template<class FEval>
friend int predictOrderedStump( CascadeClassifierImpl& cascade, Ptr<FeatureEvaluator> &featureEvaluator, double& weight);

template<class FEval>
friend int predictCategoricalStump( CascadeClassifierImpl& cascade, Ptr<FeatureEvaluator> &featureEvaluator, double& weight);

bool setImage( Ptr<FeatureEvaluator>& feval, const Mat& image);
int runAt( Ptr<FeatureEvaluator>& feval, Point pt, double& weight );

class Data
{
public:
struct DTreeNode
{
int featureIdx;
float threshold; // for ordered features only
int left;
int right;
};

struct DTree
{
int nodeCount;
};

struct Stage
{
int first;
int ntrees;
float threshold;
};

bool read(const FileNode &node);

bool isStumpBased;

int stageType;
int featureType;
int ncategories;
Size origWinSize;

std::vector<Stage> stages;
std::vector<DTree> classifiers;
std::vector<DTreeNode> nodes;
std::vector<float> leaves;
std::vector<int> subsets;
};

Data data;
Ptr<FeatureEvaluator> featureEvaluator;
Ptr<CvHaarClassifierCascade> oldCascade;

Ptr<MaskGenerator> maskGenerator;
};

#define CC_CASCADE_PARAMS "cascadeParams"
#define CC_STAGE_TYPE "stageType"
#define CC_FEATURE_TYPE "featureType"
@@ -322,30 +448,31 @@ inline void HOGEvaluator::Feature :: updatePtrs( const std::vector<Mat> &_hist,
//---------------------------------------------- predictor functions -------------------------------------

template<class FEval>
inline int predictOrdered( CascadeClassifier& cascade, Ptr<FeatureEvaluator> &_featureEvaluator, double& sum )
inline int predictOrdered( CascadeClassifierImpl& cascade,
Ptr<FeatureEvaluator> &_featureEvaluator, double& sum )
{
int nstages = (int)cascade.data.stages.size();
int nodeOfs = 0, leafOfs = 0;
FEval& featureEvaluator = (FEval&)*_featureEvaluator;
float* cascadeLeaves = &cascade.data.leaves[0];
CascadeClassifier::Data::DTreeNode* cascadeNodes = &cascade.data.nodes[0];
CascadeClassifier::Data::DTree* cascadeWeaks = &cascade.data.classifiers[0];
CascadeClassifier::Data::Stage* cascadeStages = &cascade.data.stages[0];
CascadeClassifierImpl::Data::DTreeNode* cascadeNodes = &cascade.data.nodes[0];
CascadeClassifierImpl::Data::DTree* cascadeWeaks = &cascade.data.classifiers[0];
CascadeClassifierImpl::Data::Stage* cascadeStages = &cascade.data.stages[0];

for( int si = 0; si < nstages; si++ )
{
CascadeClassifier::Data::Stage& stage = cascadeStages[si];
CascadeClassifierImpl::Data::Stage& stage = cascadeStages[si];
int wi, ntrees = stage.ntrees;
sum = 0;

for( wi = 0; wi < ntrees; wi++ )
{
CascadeClassifier::Data::DTree& weak = cascadeWeaks[stage.first + wi];
CascadeClassifierImpl::Data::DTree& weak = cascadeWeaks[stage.first + wi];
int idx = 0, root = nodeOfs;

do
{
CascadeClassifier::Data::DTreeNode& node = cascadeNodes[root + idx];
CascadeClassifierImpl::Data::DTreeNode& node = cascadeNodes[root + idx];
double val = featureEvaluator(node.featureIdx);
idx = val < node.threshold ? node.left : node.right;
}

@@ -361,7 +488,8 @@ inline int predictOrdered( CascadeClassifier& cascade, Ptr<FeatureEvaluator> &_f
}

template<class FEval>
inline int predictCategorical( CascadeClassifier& cascade, Ptr<FeatureEvaluator> &_featureEvaluator, double& sum )
inline int predictCategorical( CascadeClassifierImpl& cascade,
Ptr<FeatureEvaluator> &_featureEvaluator, double& sum )
{
int nstages = (int)cascade.data.stages.size();
int nodeOfs = 0, leafOfs = 0;

@@ -369,23 +497,23 @@ inline int predictCategorical( CascadeClassifier& cascade, Ptr<FeatureEvaluator>
size_t subsetSize = (cascade.data.ncategories + 31)/32;
int* cascadeSubsets = &cascade.data.subsets[0];
float* cascadeLeaves = &cascade.data.leaves[0];
CascadeClassifier::Data::DTreeNode* cascadeNodes = &cascade.data.nodes[0];
CascadeClassifier::Data::DTree* cascadeWeaks = &cascade.data.classifiers[0];
CascadeClassifier::Data::Stage* cascadeStages = &cascade.data.stages[0];
CascadeClassifierImpl::Data::DTreeNode* cascadeNodes = &cascade.data.nodes[0];
CascadeClassifierImpl::Data::DTree* cascadeWeaks = &cascade.data.classifiers[0];
CascadeClassifierImpl::Data::Stage* cascadeStages = &cascade.data.stages[0];

for(int si = 0; si < nstages; si++ )
{
CascadeClassifier::Data::Stage& stage = cascadeStages[si];
CascadeClassifierImpl::Data::Stage& stage = cascadeStages[si];
int wi, ntrees = stage.ntrees;
sum = 0;

for( wi = 0; wi < ntrees; wi++ )
{
CascadeClassifier::Data::DTree& weak = cascadeWeaks[stage.first + wi];
CascadeClassifierImpl::Data::DTree& weak = cascadeWeaks[stage.first + wi];
int idx = 0, root = nodeOfs;
do
{
CascadeClassifier::Data::DTreeNode& node = cascadeNodes[root + idx];
CascadeClassifierImpl::Data::DTreeNode& node = cascadeNodes[root + idx];
int c = featureEvaluator(node.featureIdx);
const int* subset = &cascadeSubsets[(root + idx)*subsetSize];
idx = (subset[c>>5] & (1 << (c & 31))) ? node.left : node.right;

@@ -402,24 +530,25 @@ inline int predictCategorical( CascadeClassifier& cascade, Ptr<FeatureEvaluator>
}

template<class FEval>
inline int predictOrderedStump( CascadeClassifier& cascade, Ptr<FeatureEvaluator> &_featureEvaluator, double& sum )
inline int predictOrderedStump( CascadeClassifierImpl& cascade,
Ptr<FeatureEvaluator> &_featureEvaluator, double& sum )
{
int nodeOfs = 0, leafOfs = 0;
FEval& featureEvaluator = (FEval&)*_featureEvaluator;
float* cascadeLeaves = &cascade.data.leaves[0];
CascadeClassifier::Data::DTreeNode* cascadeNodes = &cascade.data.nodes[0];
CascadeClassifier::Data::Stage* cascadeStages = &cascade.data.stages[0];
CascadeClassifierImpl::Data::DTreeNode* cascadeNodes = &cascade.data.nodes[0];
CascadeClassifierImpl::Data::Stage* cascadeStages = &cascade.data.stages[0];

int nstages = (int)cascade.data.stages.size();
for( int stageIdx = 0; stageIdx < nstages; stageIdx++ )
{
CascadeClassifier::Data::Stage& stage = cascadeStages[stageIdx];
CascadeClassifierImpl::Data::Stage& stage = cascadeStages[stageIdx];
sum = 0.0;

int ntrees = stage.ntrees;
for( int i = 0; i < ntrees; i++, nodeOfs++, leafOfs+= 2 )
{
CascadeClassifier::Data::DTreeNode& node = cascadeNodes[nodeOfs];
CascadeClassifierImpl::Data::DTreeNode& node = cascadeNodes[nodeOfs];
double value = featureEvaluator(node.featureIdx);
sum += cascadeLeaves[ value < node.threshold ? leafOfs : leafOfs + 1 ];
}

@@ -432,7 +561,8 @@ inline int predictOrderedStump( CascadeClassifier& cascade, Ptr<FeatureEvaluator
}

template<class FEval>
inline int predictCategoricalStump( CascadeClassifier& cascade, Ptr<FeatureEvaluator> &_featureEvaluator, double& sum )
inline int predictCategoricalStump( CascadeClassifierImpl& cascade,
Ptr<FeatureEvaluator> &_featureEvaluator, double& sum )
{
int nstages = (int)cascade.data.stages.size();
int nodeOfs = 0, leafOfs = 0;

@@ -440,15 +570,15 @@ inline int predictCategoricalStump( CascadeClassifier& cascade, Ptr<FeatureEvalu
size_t subsetSize = (cascade.data.ncategories + 31)/32;
int* cascadeSubsets = &cascade.data.subsets[0];
float* cascadeLeaves = &cascade.data.leaves[0];
CascadeClassifier::Data::DTreeNode* cascadeNodes = &cascade.data.nodes[0];
CascadeClassifier::Data::Stage* cascadeStages = &cascade.data.stages[0];
CascadeClassifierImpl::Data::DTreeNode* cascadeNodes = &cascade.data.nodes[0];
CascadeClassifierImpl::Data::Stage* cascadeStages = &cascade.data.stages[0];

#ifdef HAVE_TEGRA_OPTIMIZATION
float tmp = 0; // float accumulator -- float operations are quicker
#endif
for( int si = 0; si < nstages; si++ )
{
CascadeClassifier::Data::Stage& stage = cascadeStages[si];
CascadeClassifierImpl::Data::Stage& stage = cascadeStages[si];
int wi, ntrees = stage.ntrees;
#ifdef HAVE_TEGRA_OPTIMIZATION
tmp = 0;

@@ -458,7 +588,7 @@ inline int predictCategoricalStump( CascadeClassifier& cascade, Ptr<FeatureEvalu

for( wi = 0; wi < ntrees; wi++ )
{
CascadeClassifier::Data::DTreeNode& node = cascadeNodes[nodeOfs];
CascadeClassifierImpl::Data::DTreeNode& node = cascadeNodes[nodeOfs];
int c = featureEvaluator(node.featureIdx);
const int* subset = &cascadeSubsets[nodeOfs*subsetSize];
#ifdef HAVE_TEGRA_OPTIMIZATION
@@ -655,7 +655,7 @@ void OclCascadeClassifier::detectMultiScale(oclMat &gimg, CV_OUT std::vector<cv:
//CvSeq *cv::ocl::OclCascadeClassifier::oclHaarDetectObjects( oclMat &gimg, CvMemStorage *storage, double scaleFactor,
// int minNeighbors, int flags, CvSize minSize, CvSize maxSize)
{
CvHaarClassifierCascade *cascade = oldCascade;
CvHaarClassifierCascade *cascade = (CvHaarClassifierCascade*)getOldCascade();

const double GROUP_EPS = 0.2;