opencv/samples/cpp/points_classifier.cpp


#include "opencv2/opencv_modules.hpp"
2011-05-01 00:44:34 +08:00
#include "opencv2/core/core.hpp"
#include "opencv2/ml/ml.hpp"
#include "opencv2/highgui/highgui.hpp"
#ifdef HAVE_OPENCV_OCL
#define _OCL_KNN_ 1 // select whether using ocl::KNN method or not, default is using
#define _OCL_SVM_ 1 // select whether using ocl::svm method or not, default is using
#include "opencv2/ocl/ocl.hpp"
#endif
2011-05-01 00:44:34 +08:00
#include <stdio.h>
using namespace std;
using namespace cv;
using namespace cv::ml;

const Scalar WHITE_COLOR = Scalar(255,255,255);

const string winName = "points";
const int testStep = 5;
Mat img, imgDst;
RNG rng;
vector<Point> trainedPoints;
vector<int> trainedPointsMarkers;
const int MAX_CLASSES = 2;
vector<Vec3b> classColors(MAX_CLASSES);
int currentClass = 0;
vector<int> classCounters(MAX_CLASSES);
#define _NBC_ 1 // normal Bayes classifier
#define _KNN_ 1 // k nearest neighbors classifier
#define _SVM_ 1 // support vector machine
#define _DT_ 1 // decision tree
#define _BT_ 1 // AdaBoost
#define _GBT_ 0 // gradient boosted trees
#define _RF_ 1 // random forest
#define _ANN_ 1 // artificial neural networks
#define _EM_ 1 // expectation-maximization
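// Mouse callback: a left-button click (EVENT_LBUTTONUP) adds a training point
// of the currently selected class, then repaints all points in the window.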
static void on_mouse( int event, int x, int y, int /*flags*/, void* )
{
if( img.empty() )
return;
bool updateFlag = false;
if( event == EVENT_LBUTTONUP )
{
trainedPoints.push_back( Point(x,y) );
trainedPointsMarkers.push_back( currentClass );
classCounters[currentClass]++;
updateFlag = true;
}
//draw
if( updateFlag )
{
img = Scalar::all(0);
// draw points
for( size_t i = 0; i < trainedPoints.size(); i++ )
{
Vec3b c = classColors[trainedPointsMarkers[i]];
circle( img, trainedPoints[i], 5, Scalar(c), -1 );
}
imshow( winName, img );
}
}
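// Convert the collected points into a single-channel CV_32F matrix,
// one row per sample with the (x, y) coordinates as the two columns.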
static Mat prepare_train_samples(const vector<Point>& pts)
{
Mat samples;
Mat(pts).reshape(1, (int)pts.size()).convertTo(samples, CV_32F);
return samples;
}
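// Pack the samples and their class labels into a TrainData object (samples stored row-wise).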
static Ptr<TrainData> prepare_train_data()
{
Mat samples = prepare_train_samples(trainedPoints);
return TrainData::create(samples, ROW_SAMPLE, Mat(trainedPointsMarkers));
}
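// Classify every testStep-th pixel of the canvas with the trained model
// and paint it with the color of the predicted class.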
static void predict_and_paint(const Ptr<StatModel>& model, Mat& dst)
{
Mat testSample( 1, 2, CV_32FC1 );
for( int y = 0; y < img.rows; y += testStep )
{
for( int x = 0; x < img.cols; x += testStep )
{
testSample.at<float>(0) = (float)x;
testSample.at<float>(1) = (float)y;
int response = (int)model->predict( testSample );
dst.at<Vec3b>(y, x) = classColors[response];
}
}
}
#if _NBC_
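// Train a normal Bayes classifier on the clicked points and paint its decision regions.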
static void find_decision_boundary_NBC()
{
// learn classifier
Ptr<NormalBayesClassifier> normalBayesClassifier = NormalBayesClassifier::create();
normalBayesClassifier->train(prepare_train_data());
predict_and_paint(normalBayesClassifier, imgDst);
}
#endif
#if _KNN_
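// Train a k-nearest-neighbors classifier; K is the number of neighbors used for voting.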
static void find_decision_boundary_KNN( int K )
{
Ptr<KNearest> knn = KNearest::create(true);
knn->setDefaultK(K);
knn->train(prepare_train_data());
predict_and_paint(knn, imgDst);
}
#endif
#if _SVM_
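// Train an SVM with the given parameters, paint its decision regions
// and mark the support vectors with white circles.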
static void find_decision_boundary_SVM( SVM::Params params )
{
Ptr<SVM> svm = SVM::create(params);
svm->train(prepare_train_data());
predict_and_paint(svm, imgDst);
Mat sv = svm->getSupportVectors();
for( int i = 0; i < sv.rows; i++ )
{
const float* supportVector = sv.ptr<float>(i);
circle( imgDst, Point(saturate_cast<int>(supportVector[0]),saturate_cast<int>(supportVector[1])), 5, Scalar(255,255,255), -1 );
}
}
#endif
#if _DT_
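// Train a single decision tree of limited depth (no surrogate splits, no cross-validation pruning).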
static void find_decision_boundary_DT()
{
DTrees::Params params;
params.maxDepth = 8;
params.minSampleCount = 2;
params.useSurrogates = false;
params.CVFolds = 0; // the number of cross-validation folds
params.use1SERule = false;
params.truncatePrunedTree = false;
Ptr<DTrees> dtree = DTrees::create(params);
dtree->train(prepare_train_data());
predict_and_paint(dtree, imgDst);
}
#endif
#if _BT_
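// Train a discrete AdaBoost ensemble of 100 depth-2 weak trees.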
static void find_decision_boundary_BT()
{
Boost::Params params( Boost::DISCRETE, // boost_type
100, // weak_count
0.95, // weight_trim_rate
2, // max_depth
false, //use_surrogates
Mat() // priors
);
Ptr<Boost> boost = Boost::create(params);
boost->train(prepare_train_data());
predict_and_paint(boost, imgDst);
}
#endif
#if _GBT_
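// Train gradient boosted trees with deviance loss (this block is disabled by default; see _GBT_ above).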
static void find_decision_boundary_GBT()
{
GBTrees::Params params( GBTrees::DEVIANCE_LOSS, // loss_function_type
100, // weak_count
0.1f, // shrinkage
1.0f, // subsample_portion
2, // max_depth
false // use_surrogates
);
Ptr<GBTrees> gbtrees = GBTrees::create(params);
gbtrees->train(prepare_train_data());
predict_and_paint(gbtrees, imgDst);
}
#endif
#if _RF_
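// Train a random forest of depth-4 trees; the TermCriteria limits the forest to 5 trees.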
static void find_decision_boundary_RF()
{
RTrees::Params params( 4, // max_depth,
2, // min_sample_count,
0.f, // regression_accuracy,
false, // use_surrogates,
16, // max_categories,
Mat(), // priors,
false, // calc_var_importance,
1, // nactive_vars,
TermCriteria(TermCriteria::MAX_ITER, 5, 0) // max_num_of_trees_in_the_forest,
);
Ptr<RTrees> rtrees = RTrees::create(params);
rtrees->train(prepare_train_data());
predict_and_paint(rtrees, imgDst);
}
#endif
#if _ANN_
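// Train a multi-layer perceptron; the class labels are first converted
// to one-of-N (one-hot) target vectors, one column per class.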
static void find_decision_boundary_ANN( const Mat& layer_sizes )
{
ANN_MLP::Params params(TermCriteria(TermCriteria::MAX_ITER+TermCriteria::EPS, 300, FLT_EPSILON),
ANN_MLP::Params::BACKPROP, 0.001);
Ptr<ANN_MLP> ann = ANN_MLP::create(layer_sizes, params, ANN_MLP::SIGMOID_SYM, 1, 1 );
Mat trainClasses = Mat::zeros( (int)trainedPoints.size(), (int)classColors.size(), CV_32FC1 );
for( int i = 0; i < trainClasses.rows; i++ )
{
trainClasses.at<float>(i, trainedPointsMarkers[i]) = 1.f;
}
Mat samples = prepare_train_samples(trainedPoints);
Ptr<TrainData> tdata = TrainData::create(samples, ROW_SAMPLE, trainClasses);
ann->train(tdata);
predict_and_paint(ann, imgDst);
}
#endif
#if _EM_
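// Fit one Gaussian mixture model (EM) per class, then label every grid point
// with the class whose mixture yields the highest log-likelihood.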
static void find_decision_boundary_EM()
{
img.copyTo( imgDst );
Mat samples = prepare_train_samples(trainedPoints);
int i, j, nmodels = (int)classColors.size();
vector<Ptr<EM> > em_models(nmodels);
Mat modelSamples;
for( i = 0; i < nmodels; i++ )
{
const int componentCount = 3;
modelSamples.release();
for( j = 0; j < samples.rows; j++ )
{
if( trainedPointsMarkers[j] == i )
modelSamples.push_back(samples.row(j));
}
// learn models
if( !modelSamples.empty() )
{
em_models[i] = EM::train(modelSamples, noArray(), noArray(), noArray(),
EM::Params(componentCount, EM::COV_MAT_DIAGONAL));
}
}
// classify the coordinate-plane points with the Bayes rule, i.e.
// y(x) = argmax_i likelihoods_i(x), i = 1..nmodels
Mat testSample(1, 2, CV_32FC1 );
Mat logLikelihoods(1, nmodels, CV_64FC1, Scalar(-DBL_MAX));
for( int y = 0; y < img.rows; y += testStep )
{
for( int x = 0; x < img.cols; x += testStep )
{
testSample.at<float>(0) = (float)x;
testSample.at<float>(1) = (float)y;
for( i = 0; i < nmodels; i++ )
{
if( !em_models[i].empty() )
logLikelihoods.at<double>(i) = em_models[i]->predict2(testSample, noArray())[0];
}
Point maxLoc;
minMaxLoc(logLikelihoods, 0, 0, 0, &maxLoc);
imgDst.at<Vec3b>(y, x) = classColors[maxLoc.x];
}
}
}
#endif
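// Interactive demo: click to add points of the current class, press 'r' to train
// all enabled models and show their decision regions in separate windows.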
int main()
{
cout << "Use:" << endl
<< " key '0' .. '1' - switch to class #n" << endl
<< " left mouse button - to add new point;" << endl
<< " key 'r' - to run the ML model;" << endl
<< " key 'i' - to init (clear) the data." << endl << endl;
namedWindow( winName, WINDOW_AUTOSIZE );
img.create( 480, 640, CV_8UC3 );
imgDst.create( 480, 640, CV_8UC3 );
imshow( winName, img );
setMouseCallback( winName, on_mouse );
classColors[0] = Vec3b(0, 255, 0);
classColors[1] = Vec3b(0, 0, 255);
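// Keyboard loop: Esc quits, 'i' clears the data, '0'/'1' select the current class,
// 'r' trains and displays all enabled models.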
for(;;)
{
uchar key = (uchar)waitKey();
if( key == 27 ) break;
if( key == 'i' ) // init
{
img = Scalar::all(0);
trainedPoints.clear();
trainedPointsMarkers.clear();
classCounters.assign(MAX_CLASSES, 0);
imshow( winName, img );
}
if( key == '0' || key == '1' )
{
currentClass = key - '0';
}
if( key == 'r' ) // run
{
double minVal = 0;
minMaxLoc(classCounters, &minVal, 0, 0, 0);
if( minVal == 0 )
{
printf("each class should have at least 1 point\n");
continue;
}
img.copyTo( imgDst );
#if _NBC_
find_decision_boundary_NBC();
namedWindow( "NormalBayesClassifier", WINDOW_AUTOSIZE );
imshow( "NormalBayesClassifier", imgDst );
#endif
#if _KNN_
int K = 3;
find_decision_boundary_KNN( K );
namedWindow( "kNN", WINDOW_AUTOSIZE );
imshow( "kNN", imgDst );
K = 15;
find_decision_boundary_KNN( K );
namedWindow( "kNN2", WINDOW_AUTOSIZE );
imshow( "kNN2", imgDst );
#endif
#if _SVM_
// (1)-(2) separable and non-separable sets
SVM::Params params;
params.svmType = SVM::C_SVC;
params.kernelType = SVM::POLY; //SVM::LINEAR;
params.degree = 0.5;
params.gamma = 1;
params.coef0 = 1;
params.C = 1;
params.nu = 0.5;
params.p = 0;
params.termCrit = TermCriteria(TermCriteria::MAX_ITER+TermCriteria::EPS, 1000, 0.01);
find_decision_boundary_SVM( params );
namedWindow( "classificationSVM1", WINDOW_AUTOSIZE );
imshow( "classificationSVM1", imgDst );
params.C = 10;
find_decision_boundary_SVM( params );
namedWindow( "classificationSVM2", WINDOW_AUTOSIZE );
imshow( "classificationSVM2", imgDst );
#endif
#if _DT_
find_decision_boundary_DT();
namedWindow( "DT", WINDOW_AUTOSIZE );
imshow( "DT", imgDst );
#endif
#if _BT_
find_decision_boundary_BT();
namedWindow( "BT", WINDOW_AUTOSIZE );
imshow( "BT", imgDst);
#endif
#if _GBT_
find_decision_boundary_GBT();
namedWindow( "GBT", WINDOW_AUTOSIZE );
imshow( "GBT", imgDst);
#endif
#if _RF_
find_decision_boundary_RF();
namedWindow( "RF", WINDOW_AUTOSIZE );
imshow( "RF", imgDst);
#endif
#if _ANN_
Mat layer_sizes1( 1, 3, CV_32SC1 );
layer_sizes1.at<int>(0) = 2;
layer_sizes1.at<int>(1) = 5;
layer_sizes1.at<int>(2) = (int)classColors.size();
find_decision_boundary_ANN( layer_sizes1 );
namedWindow( "ANN", WINDOW_AUTOSIZE );
imshow( "ANN", imgDst );
#endif
#if _EM_
find_decision_boundary_EM();
namedWindow( "EM", WINDOW_AUTOSIZE );
imshow( "EM", imgDst );
#endif
}
}
return 0;
}