Mirror of https://github.com/opencv/opencv.git
Delete function areClassesEmpty().
commit 74c87a26a5
parent 068677ad50
@@ -1507,7 +1507,7 @@ public:
 SVMSGD provides a fast and easy-to-use implementation of the SVM classifier using the Stochastic Gradient Descent approach,
 as presented in @cite bottou2010large.
 
-The classifier has 5 parameters. These are
+The classifier has following parameters:
 - model type,
 - margin type,
 - margin regularization (\f$\lambda\f$),
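
For reference, the parameters listed above (the list continues with initial step size and step decreasing power, mirrored by the params struct later in this diff) correspond to individual setters on the SVMSGD interface. A minimal sketch, assuming the setter names and the SGD/ASGD and SOFT_MARGIN/HARD_MARGIN constants from the public cv::ml::SVMSGD API; the numeric values are placeholders, not recommendations:

    #include <opencv2/ml.hpp>
    using namespace cv;
    using namespace cv::ml;

    // Manual configuration of the SVMSGD parameters; setOptimalParameters()
    // picks sensible presets instead, so this is rarely needed.
    Ptr<SVMSGD> makeConfiguredSvmSgd()
    {
        Ptr<SVMSGD> svmsgd = SVMSGD::create();
        svmsgd->setSvmsgdType(SVMSGD::ASGD);          // model type
        svmsgd->setMarginType(SVMSGD::SOFT_MARGIN);   // margin type
        svmsgd->setMarginRegularization(0.00001f);    // margin regularization (lambda)
        svmsgd->setInitialStepSize(0.05f);            // initial step size
        svmsgd->setStepDecreasingPower(0.75f);        // step decreasing power
        svmsgd->setTermCriteria(TermCriteria(TermCriteria::COUNT + TermCriteria::EPS, 100000, 0.00001));
        return svmsgd;
    }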
@@ -1567,11 +1567,8 @@ To use SVMSGD algorithm do as follows:
 // Create empty object
 cv::Ptr<SVMSGD> svmsgd = SVMSGD::create();
 
-// Set parameters
-svmsgd->setOptimalParameters();
-
 // Train the Stochastic Gradient Descent SVM
-SvmSgd->train(trainData);
+svmsgd->train(trainData);
 
 // Predict labels for the new samples
 svmsgd->predict(samples, responses);
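
Taken together, the two documentation hunks reduce the advertised flow to create, train, predict, with the constructor supplying default parameters. A self-contained sketch of that flow; the toy samples and labels are invented for illustration:

    #include <opencv2/core.hpp>
    #include <opencv2/ml.hpp>
    using namespace cv;
    using namespace cv::ml;

    int main()
    {
        // Toy training set: two 2-D points per class, labelled +1 / -1 (illustrative data).
        Mat samples = (Mat_<float>(4, 2) <<  1.f,  1.f,   2.f,  2.f,
                                            -1.f, -1.f,  -2.f, -2.f);
        Mat responses = (Mat_<float>(4, 1) << 1.f, 1.f, -1.f, -1.f);
        Ptr<TrainData> trainData = TrainData::create(samples, ROW_SAMPLE, responses);

        // Create empty object; after this commit the constructor already applies
        // setOptimalParameters(), so no extra setup call is required.
        Ptr<SVMSGD> svmsgd = SVMSGD::create();

        // Train the Stochastic Gradient Descent SVM
        svmsgd->train(trainData);

        // Predict labels for new samples (one prediction per row of newSamples).
        Mat newSamples = (Mat_<float>(1, 2) << 1.5f, 1.5f);
        Mat predicted;
        svmsgd->predict(newSamples, predicted);
        return 0;
    }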
@@ -99,8 +99,6 @@ public:
 private:
     void updateWeights(InputArray sample, bool isPositive, float stepSize, Mat &weights);
 
-    std::pair<bool,bool> areClassesEmpty(Mat responses);
-
     void writeParams( FileStorage &fs ) const;
 
     void readParams( const FileNode &fn );
@@ -138,26 +136,6 @@ Ptr<SVMSGD> SVMSGD::create()
     return makePtr<SVMSGDImpl>();
 }
 
-std::pair<bool,bool> SVMSGDImpl::areClassesEmpty(Mat responses)
-{
-    CV_Assert(responses.cols == 1 || responses.rows == 1);
-    std::pair<bool,bool> emptyInClasses(true, true);
-    int limitIndex = responses.rows;
-
-    for(int index = 0; index < limitIndex; index++)
-    {
-        if (isPositive(responses.at<float>(index)))
-            emptyInClasses.first = false;
-        else
-            emptyInClasses.second = false;
-
-        if (!emptyInClasses.first && ! emptyInClasses.second)
-            break;
-    }
-
-    return emptyInClasses;
-}
-
 void SVMSGDImpl::normalizeSamples(Mat &samples, Mat &average, float &multiplier)
 {
     int featuresCount = samples.cols;
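
The deleted helper walked the response vector element by element just to learn whether each class is represented. The same information falls out of two whole-matrix reductions, which is what the reworked train() in the next hunk relies on; a minimal sketch of that equivalence (the function name hasBothClasses is hypothetical):

    #include <opencv2/core.hpp>
    using namespace cv;

    // True when the responses contain at least one sample of each class,
    // using the same >= 0 / < 0 split as the reworked train().
    static bool hasBothClasses(const Mat& responses)
    {
        CV_Assert(responses.type() == CV_32FC1);
        int positiveCount = countNonZero(responses >= 0);  // positive-class labels
        int negativeCount = countNonZero(responses < 0);   // negative-class labels
        return positiveCount > 0 && negativeCount > 0;
    }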
@@ -248,16 +226,20 @@ bool SVMSGDImpl::train(const Ptr<TrainData>& data, int)
     int featureCount = trainSamples.cols;
     Mat trainResponses = data->getTrainResponses();        // (trainSamplesCount x 1) matrix
 
-    std::pair<bool,bool> areEmpty = areClassesEmpty(trainResponses);
+    CV_Assert(trainResponses.rows == trainSamples.rows);
 
-    if ( areEmpty.first && areEmpty.second )
+    if (trainResponses.empty())
     {
         return false;
     }
-    if ( areEmpty.first || areEmpty.second )
+
+    int positiveCount = countNonZero(trainResponses >= 0);
+    int negativeCount = countNonZero(trainResponses < 0);
+
+    if ( positiveCount <= 0 || negativeCount <= 0 )
     {
         weights_ = Mat::zeros(1, featureCount, CV_32F);
-        shift_ = areEmpty.first ? -1.f : 1.f;
+        shift_ = (positiveCount > 0) ? 1.f : -1.f;
         return true;
     }
 
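
When only one class is present, the new code still returns true but stores an all-zero weight vector and a shift of +1 or -1, so the model degenerates to a constant prediction. A small sketch of the linear rule predict() evaluates per sample, showing why that is enough; the sign convention here is assumed, not quoted from the implementation:

    #include <opencv2/core.hpp>
    using namespace cv;

    // Decision for one sample: dot(weights, sample) + shift, then take the sign.
    // With weights == 0 the result is the sign of shift, i.e. the single label seen in training.
    static float decide(const Mat& sample, const Mat& weights, float shift)
    {
        float value = static_cast<float>(weights.dot(sample)) + shift;
        return (value > 0.f) ? 1.f : -1.f;
    }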
@@ -340,7 +322,7 @@ float SVMSGDImpl::predict( InputArray _samples, OutputArray _results, int ) const
     int nSamples = samples.rows;
     cv::Mat results;
 
-    CV_Assert( samples.cols == weights_.cols && samples.type() == CV_32F );
+    CV_Assert( samples.cols == weights_.cols && samples.type() == CV_32FC1);
 
     if( _results.needed() )
     {
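
CV_32F and CV_32FC1 name the same type (32-bit float, one channel), so this assert is unchanged in behaviour; the rewrite only spells out that predict() expects single-channel float rows. A sketch of preparing arbitrary input to satisfy it (the conversion helper is an assumption about the caller, not part of this change):

    #include <opencv2/core.hpp>
    using namespace cv;

    // Convert raw samples to single-channel 32-bit float, one sample per row.
    static Mat toPredictInput(const Mat& raw)
    {
        Mat samples;
        raw.convertTo(samples, CV_32F);           // CV_32F and CV_32FC1 are the same constant
        samples = samples.reshape(1, raw.rows);   // flatten any channels into columns
        CV_Assert(samples.type() == CV_32FC1);    // mirrors the check in predict()
        return samples;
    }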
@@ -498,17 +480,7 @@ void SVMSGDImpl::clear()
 SVMSGDImpl::SVMSGDImpl()
 {
     clear();
-
-    params.svmsgdType = -1;
-    params.marginType = -1;
-
-    // Parameters for learning
-    params.marginRegularization = 0;        // regularization
-    params.initialStepSize = 0;             // learning rate (ideally should be large at beginning and decay each iteration)
-    params.stepDecreasingPower = 0;
-
-    TermCriteria _termCrit(TermCriteria::COUNT + TermCriteria::EPS, 0, 0);
-    params.termCrit = _termCrit;
+    setOptimalParameters();
 }
 
 void SVMSGDImpl::setOptimalParameters(int svmsgdType, int marginType)
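
With the constructor reduced to clear() plus setOptimalParameters(), an object fresh from create() is already configured, which is why the explicit setOptimalParameters() calls disappear from the test and the sample in the hunks below. A minimal sketch under that assumption:

    #include <opencv2/ml.hpp>
    using namespace cv;
    using namespace cv::ml;

    Ptr<SVMSGD> makeDefaultSvmSgd()
    {
        // The constructor now runs clear() and setOptimalParameters(), so this is trainable as-is.
        Ptr<SVMSGD> svmsgd = SVMSGD::create();

        // An explicit call is still available to pick a different preset, e.g.:
        // svmsgd->setOptimalParameters(SVMSGD::SGD, SVMSGD::HARD_MARGIN);
        return svmsgd;
    }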
@@ -182,8 +182,6 @@ void CV_SVMSGDTrainTest::run( int /*start_from*/ )
 {
     cv::Ptr<SVMSGD> svmsgd = SVMSGD::create();
 
-    svmsgd->setOptimalParameters();
-
     svmsgd->train(data);
 
     Mat responses;
@@ -46,7 +46,6 @@ void addPointRetrainAndRedraw(Data &data, int x, int y, int response);
 bool doTrain( const Mat samples, const Mat responses, Mat &weights, float &shift)
 {
     cv::Ptr<SVMSGD> svmsgd = SVMSGD::create();
-    svmsgd->setOptimalParameters();
 
     cv::Ptr<TrainData> trainData = TrainData::create(samples, cv::ml::ROW_SAMPLE, responses);
     svmsgd->train( trainData );
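
The sample's doTrain() exists to hand back the trained weights and shift for drawing the separating line, and the hunk cuts off before that part. A hedged sketch of how the function presumably continues, assuming the getWeights()/getShift() accessors of the SVMSGD interface; the isTrained() guard is likewise an assumption:

    #include <opencv2/core.hpp>
    #include <opencv2/ml.hpp>
    using namespace cv;
    using namespace cv::ml;

    // Sketch of doTrain(): train on the given points, then read back the linear model.
    bool doTrainSketch(const Mat samples, const Mat responses, Mat &weights, float &shift)
    {
        Ptr<SVMSGD> svmsgd = SVMSGD::create();   // setOptimalParameters() now runs in the constructor

        Ptr<TrainData> trainData = TrainData::create(samples, cv::ml::ROW_SAMPLE, responses);
        svmsgd->train(trainData);

        if (!svmsgd->isTrained())
            return false;

        weights = svmsgd->getWeights();          // 1 x featureCount row of coefficients
        shift = svmsgd->getShift();              // scalar bias of the decision function
        return true;
    }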