///////////////////////////////////////////////////////////////////////////////////////
// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.

// By downloading, copying, installing or using the software you agree to this license.
// If you do not agree to this license, do not download, install,
// copy or use the software.

// This is an implementation of the Logistic Regression algorithm in C++ in OpenCV.

// AUTHOR:
// Rahul Kavi rahulkavi[at]live[at]com

// # You are free to use, change, or redistribute the code in any way you wish for
// # non-commercial purposes, but please maintain the name of the original author.
// # This code comes with no warranty of any kind.

// # Logistic Regression ALGORITHM
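
// For reference, the model this file implements (standard logistic
// regression; the notation below is a sketch, not part of the original
// header): given a feature row x (with a leading 1 for the bias term) and
// a weight vector theta, the predicted probability is
//
//     h(x) = sigmoid(x * theta) = 1 / (1 + exp(-x * theta))
//
// and training minimizes the regularized cross-entropy cost implemented
// in compute_cost() below, using batch or mini-batch gradient descent.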

//                           License Agreement
//                For Open Source Computer Vision Library

// Copyright (C) 2000-2008, Intel Corporation, all rights reserved.
// Copyright (C) 2008-2011, Willow Garage Inc., all rights reserved.
// Third party copyrights are property of their respective owners.

// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:

//   * Redistributions of source code must retain the above copyright notice,
//     this list of conditions and the following disclaimer.

//   * Redistributions in binary form must reproduce the above copyright notice,
//     this list of conditions and the following disclaimer in the documentation
//     and/or other materials provided with the distribution.

//   * The name of the copyright holders may not be used to endorse or promote products
//     derived from this software without specific prior written permission.

// This software is provided by the copyright holders and contributors "as is" and
// any express or implied warranties, including, but not limited to, the implied
// warranties of merchantability and fitness for a particular purpose are disclaimed.
// In no event shall the Intel Corporation or contributors be liable for any direct,
// indirect, incidental, special, exemplary, or consequential damages
// (including, but not limited to, procurement of substitute goods or services;
// loss of use, data, or profits; or business interruption) however caused
// and on any theory of liability, whether in contract, strict liability,
// or tort (including negligence or otherwise) arising in any way out of
// the use of this software, even if advised of the possibility of such damage.

#include "precomp.hpp"

using namespace std;

namespace cv {
namespace ml {

class LrParams
{
public:
    LrParams()
    {
        alpha = 0.001;
        num_iters = 1000;
        norm = LogisticRegression::REG_L2;
        train_method = LogisticRegression::BATCH;
        mini_batch_size = 1;
        term_crit = TermCriteria(TermCriteria::COUNT + TermCriteria::EPS, num_iters, alpha);
    }

    double alpha; //!< learning rate.
    int num_iters; //!< number of iterations.
    int norm;
    int train_method;
    int mini_batch_size;
    TermCriteria term_crit;
};

class LogisticRegressionImpl : public LogisticRegression
{
public:
    LogisticRegressionImpl() { }
    virtual ~LogisticRegressionImpl() {}

    CV_IMPL_PROPERTY(double, LearningRate, params.alpha)
    CV_IMPL_PROPERTY(int, Iterations, params.num_iters)
    CV_IMPL_PROPERTY(int, Regularization, params.norm)
    CV_IMPL_PROPERTY(int, TrainMethod, params.train_method)
    CV_IMPL_PROPERTY(int, MiniBatchSize, params.mini_batch_size)
    CV_IMPL_PROPERTY(TermCriteria, TermCriteria, params.term_crit)

    virtual bool train( const Ptr<TrainData>& trainData, int=0 );
    virtual float predict(InputArray samples, OutputArray results, int flags=0) const;
    virtual void clear();
    virtual void write(FileStorage& fs) const;
    virtual void read(const FileNode& fn);
    virtual Mat get_learnt_thetas() const { return learnt_thetas; }
    virtual int getVarCount() const { return learnt_thetas.cols; }
    virtual bool isTrained() const { return !learnt_thetas.empty(); }
    virtual bool isClassifier() const { return true; }
    virtual String getDefaultName() const { return "opencv_ml_lr"; }

protected:
    Mat calc_sigmoid(const Mat& data) const;
    double compute_cost(const Mat& _data, const Mat& _labels, const Mat& _init_theta);
    void compute_gradient(const Mat& _data, const Mat& _labels, const Mat &_theta, const double _lambda, Mat & _gradient );
    Mat batch_gradient_descent(const Mat& _data, const Mat& _labels, const Mat& _init_theta);
    Mat mini_batch_gradient_descent(const Mat& _data, const Mat& _labels, const Mat& _init_theta);
    bool set_label_map(const Mat& _labels_i);
    Mat remap_labels(const Mat& _labels_i, const map<int, int>& lmap) const;

protected:
    LrParams params;
    Mat learnt_thetas;
    map<int, int> forward_mapper;
    map<int, int> reverse_mapper;
    Mat labels_o;
    Mat labels_n;
};

Ptr<LogisticRegression> LogisticRegression::create()
{
    return makePtr<LogisticRegressionImpl>();
}

Ptr<LogisticRegression> LogisticRegression::load(const String& filepath, const String& nodeName)
{
    return Algorithm::load<LogisticRegression>(filepath, nodeName);
}
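
// Usage sketch (illustrative only; `data`, `labels`, and `testData` are
// hypothetical CV_32F matrices, the calls are the API declared above):
//
//     Ptr<LogisticRegression> lr = LogisticRegression::create();
//     lr->setLearningRate(0.001);
//     lr->setIterations(1000);
//     lr->setRegularization(LogisticRegression::REG_L2);
//     lr->setTrainMethod(LogisticRegression::BATCH);
//     lr->train(TrainData::create(data, ROW_SAMPLE, labels));
//     Mat responses;
//     lr->predict(testData, responses);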

bool LogisticRegressionImpl::train(const Ptr<TrainData>& trainData, int)
{
    CV_TRACE_FUNCTION_SKIP_NESTED();
    // return value
    bool ok = false;

    if (trainData.empty()) {
        return false;
    }
    clear();
    Mat _data_i = trainData->getSamples();
    Mat _labels_i = trainData->getResponses();

    // check size and type of training data
    CV_Assert( !_labels_i.empty() && !_data_i.empty());
    if(_labels_i.cols != 1)
    {
        CV_Error( CV_StsBadArg, "labels should be a column matrix" );
    }
    if(_data_i.type() != CV_32FC1 || _labels_i.type() != CV_32FC1)
    {
        CV_Error( CV_StsBadArg, "data and labels must be floating point matrices" );
    }
    if(_labels_i.rows != _data_i.rows)
    {
        CV_Error( CV_StsBadArg, "number of rows in data and labels should be equal" );
    }

    // class labels
    set_label_map(_labels_i);
    Mat labels_l = remap_labels(_labels_i, this->forward_mapper);
    int num_classes = (int) this->forward_mapper.size();
    if(num_classes < 2)
    {
        CV_Error( CV_StsBadArg, "data should have at least 2 classes" );
    }

    // add a column of ones to the data (bias/intercept term)
    Mat data_t;
    hconcat( cv::Mat::ones( _data_i.rows, 1, CV_32F ), _data_i, data_t );

    // coefficient matrix (zero-initialized)
    Mat thetas;
    Mat init_theta = Mat::zeros(data_t.cols, 1, CV_32F);

    // fit the model (handles binary and multiclass cases)
    Mat new_theta;
    Mat labels;
    if(num_classes == 2)
    {
        labels_l.convertTo(labels, CV_32F);
        if(this->params.train_method == LogisticRegression::BATCH)
            new_theta = batch_gradient_descent(data_t, labels, init_theta);
        else
            new_theta = mini_batch_gradient_descent(data_t, labels, init_theta);
        thetas = new_theta.t();
    }
    else
    {
        /* take each class in turn and relabel the data: in the multi-class
           scenario this yields one theta per class (n thetas for n classes) */
        thetas.create(num_classes, data_t.cols, CV_32F);
        Mat labels_binary;
        int ii = 0;
        for(map<int,int>::iterator it = this->forward_mapper.begin(); it != this->forward_mapper.end(); ++it)
        {
            // one-vs-rest (OvR) scheme
            labels_binary = (labels_l == it->second)/255;
            labels_binary.convertTo(labels, CV_32F);
            if(this->params.train_method == LogisticRegression::BATCH)
                new_theta = batch_gradient_descent(data_t, labels, init_theta);
            else
                new_theta = mini_batch_gradient_descent(data_t, labels, init_theta);
            hconcat(new_theta.t(), thetas.row(ii));
            ii += 1;
        }
    }

    // check that the estimates are stable and finite
    this->learnt_thetas = thetas.clone();
    if( cvIsNaN( (double)sum(this->learnt_thetas)[0] ) )
    {
        CV_Error( CV_StsBadArg, "check training parameters. Invalid training classifier" );
    }

    // success
    ok = true;
    return ok;
}
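
// One-vs-rest sketch (hypothetical numbers, mirroring the loop above):
// with remapped labels {0,1,2}, the pass for class 1 trains on binary
// targets (labels_l == 1), so thetas.row(1) scores "class 1 vs. rest";
// predict() later picks the row with the highest sigmoid output.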

float LogisticRegressionImpl::predict(InputArray samples, OutputArray results, int flags) const
{
    // check if learnt_mats array is populated
    if(!this->isTrained())
    {
        CV_Error( CV_StsBadArg, "classifier should be trained first" );
    }

    // coefficient matrix
    Mat thetas;
    if ( learnt_thetas.type() == CV_32F )
    {
        thetas = learnt_thetas;
    }
    else
    {
        this->learnt_thetas.convertTo( thetas, CV_32F );
    }
    CV_Assert(thetas.rows > 0);

    // data samples
    Mat data = samples.getMat();
    if(data.type() != CV_32F)
    {
        CV_Error( CV_StsBadArg, "data must be of floating type" );
    }

    // add a column of ones to the data (bias/intercept term)
    Mat data_t;
    hconcat( cv::Mat::ones( data.rows, 1, CV_32F ), data, data_t );
    CV_Assert(data_t.cols == thetas.cols);

    // predict class labels for samples (handles binary and multiclass cases)
    Mat labels_c;
    Mat pred_m;
    Mat temp_pred;
    if(thetas.rows == 1)
    {
        // apply sigmoid function
        temp_pred = calc_sigmoid(data_t * thetas.t());
        CV_Assert(temp_pred.cols==1);
        pred_m = temp_pred.clone();

        // predict class 1 if the probability is greater than 0.5, else class 0
        temp_pred = (temp_pred > 0.5f) / 255;
        temp_pred.convertTo(labels_c, CV_32S);
    }
    else
    {
        // apply sigmoid function
        pred_m.create(data_t.rows, thetas.rows, data.type());
        for(int i = 0; i < thetas.rows; i++)
        {
            temp_pred = calc_sigmoid(data_t * thetas.row(i).t());
            vconcat(temp_pred, pred_m.col(i));
        }

        // predict the class with the maximum output
        Point max_loc;
        Mat labels;
        for(int i = 0; i < pred_m.rows; i++)
        {
            temp_pred = pred_m.row(i);
            minMaxLoc( temp_pred, NULL, NULL, NULL, &max_loc );
            labels.push_back(max_loc.x);
        }
        labels.convertTo(labels_c, CV_32S);
    }

    // return label of the predicted class. class names can be 1,2,3,...
    Mat pred_labs = remap_labels(labels_c, this->reverse_mapper);
    pred_labs.convertTo(pred_labs, CV_32S);

    // return either the labels or the raw output
    if ( results.needed() )
    {
        if ( flags & StatModel::RAW_OUTPUT )
        {
            pred_m.copyTo( results );
        }
        else
        {
            pred_labs.copyTo(results);
        }
    }

    return ( pred_labs.empty() ? 0.f : static_cast<float>(pred_labs.at<int>(0)) );
}

Mat LogisticRegressionImpl::calc_sigmoid(const Mat& data) const
{
    CV_TRACE_FUNCTION();
    Mat dest;
    exp(-data, dest);
    return 1.0/(1.0+dest);
}

double LogisticRegressionImpl::compute_cost(const Mat& _data, const Mat& _labels, const Mat& _init_theta)
{
    CV_TRACE_FUNCTION();
    float llambda = 0; /* changed llambda from int to float to solve issue #7924 */
    int m;
    int n;
    double cost = 0;
    double rparameter = 0;
    Mat theta_b;
    Mat theta_c;
    Mat d_a;
    Mat d_b;

    m = _data.rows;
    n = _data.cols;

    theta_b = _init_theta(Range(1, n), Range::all());

    if (params.norm != REG_DISABLE)
    {
        llambda = 1;
    }

    if(this->params.norm == LogisticRegression::REG_L1)
    {
        // L1 penalty: sum of the absolute weights (bias excluded)
        rparameter = (llambda/(2*m)) * sum(abs(theta_b))[0];
    }
    else
    {
        // assuming it to be L2 by default
        multiply(theta_b, theta_b, theta_c, 1);
        rparameter = (llambda/(2*m)) * sum(theta_c)[0];
    }

    d_a = calc_sigmoid(_data * _init_theta);
    log(d_a, d_a);
    multiply(d_a, _labels, d_a);

    // use the fact that: log(1 - sigmoid(x)) = log(sigmoid(-x))
    d_b = calc_sigmoid(- _data * _init_theta);
    log(d_b, d_b);
    multiply(d_b, 1-_labels, d_b);

    cost = (-1.0/m) * (sum(d_a)[0] + sum(d_b)[0]);
    cost = cost + rparameter;

    if(cvIsNaN( cost ) == 1)
    {
        CV_Error( CV_StsBadArg, "check training parameters. Invalid training classifier" );
    }

    return cost;
}
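
// For reference, compute_cost() evaluates (a sketch of the math above):
//
//     J(theta) = -(1/m) * sum_i [ y_i*log(h_i) + (1 - y_i)*log(1 - h_i) ] + r
//
// with h_i = sigmoid(x_i * theta), and r = (lambda/(2m)) * sum(|theta_b|)
// for L1 or r = (lambda/(2m)) * sum(theta_b^2) for L2, where theta_b
// excludes the bias term.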

struct LogisticRegressionImpl_ComputeGradient_Impl : ParallelLoopBody
{
    const Mat* data;
    const Mat* theta;
    const Mat* pcal_a;
    Mat* gradient;
    double lambda;

    LogisticRegressionImpl_ComputeGradient_Impl(const Mat& _data, const Mat &_theta, const Mat& _pcal_a, const double _lambda, Mat & _gradient)
        : data(&_data)
        , theta(&_theta)
        , pcal_a(&_pcal_a)
        , gradient(&_gradient)
        , lambda(_lambda)
    {
    }

    void operator()(const cv::Range& r) const
    {
        const Mat& _data = *data;
        const Mat &_theta = *theta;
        Mat & _gradient = *gradient;
        const Mat & _pcal_a = *pcal_a;
        const int m = _data.rows;
        Mat pcal_ab;

        for (int ii = r.start; ii<r.end; ii++)
        {
            Mat pcal_b = _data(Range::all(), Range(ii,ii+1));
            multiply(_pcal_a, pcal_b, pcal_ab, 1);

            _gradient.row(ii) = (1.0/m)*sum(pcal_ab)[0] + (lambda/m) * _theta.row(ii);
        }
    }
};

void LogisticRegressionImpl::compute_gradient(const Mat& _data, const Mat& _labels, const Mat &_theta, const double _lambda, Mat & _gradient )
{
    CV_TRACE_FUNCTION();
    const int m = _data.rows;
    Mat pcal_a, pcal_b, pcal_ab;

    const Mat z = _data * _theta;

    CV_Assert( _gradient.rows == _theta.rows && _gradient.cols == _theta.cols );

    pcal_a = calc_sigmoid(z) - _labels;
    pcal_b = _data(Range::all(), Range(0,1));
    multiply(pcal_a, pcal_b, pcal_ab, 1);

    // bias (intercept) component of the gradient; the remaining rows are
    // computed in parallel below
    _gradient.row(0) = ((float)1/m) * sum(pcal_ab)[0];

    LogisticRegressionImpl_ComputeGradient_Impl invoker(_data, _theta, pcal_a, _lambda, _gradient);
    cv::parallel_for_(cv::Range(1, _gradient.rows), invoker);
}
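
// For reference, the gradient assembled above (sketch):
//
//     dJ/dtheta_j = (1/m) * sum_i (h_i - y_i) * x_ij + (lambda/m) * theta_j
//
// with h_i = sigmoid(x_i * theta); the regularization term is omitted for
// j = 0, the bias row.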

Mat LogisticRegressionImpl::batch_gradient_descent(const Mat& _data, const Mat& _labels, const Mat& _init_theta)
{
    CV_TRACE_FUNCTION();
    // implements batch gradient descent
    if(this->params.alpha<=0)
    {
        CV_Error( CV_StsBadArg, "check training parameters (learning rate) for the classifier" );
    }

    if(this->params.num_iters <= 0)
    {
        CV_Error( CV_StsBadArg, "number of iterations cannot be zero or a negative number" );
    }

    int llambda = 0;
    int m;
    Mat theta_p = _init_theta.clone();
    Mat gradient( theta_p.rows, theta_p.cols, theta_p.type() );
    m = _data.rows;

    if (params.norm != REG_DISABLE)
    {
        llambda = 1;
    }

    for(int i = 0;i<this->params.num_iters;i++)
    {
        // this seems to only be called to ensure that cost is not NaN
        compute_cost(_data, _labels, theta_p);

        compute_gradient( _data, _labels, theta_p, llambda, gradient );

        theta_p = theta_p - ( static_cast<double>(this->params.alpha)/m)*gradient;
    }
    return theta_p;
}
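
// The update rule implemented by the loop above, written out for reference:
//
//     theta := theta - (alpha/m) * gradient
//
// plain full-batch gradient descent; the cost itself is evaluated only as a
// NaN guard on the parameters.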

Mat LogisticRegressionImpl::mini_batch_gradient_descent(const Mat& _data, const Mat& _labels, const Mat& _init_theta)
{
    // implements mini-batch gradient descent
    int lambda_l = 0;
    int m;
    int j = 0;
    int size_b = this->params.mini_batch_size;

    if(this->params.mini_batch_size <= 0 || this->params.alpha == 0)
    {
        CV_Error( CV_StsBadArg, "check training parameters for the classifier" );
    }

    if(this->params.num_iters <= 0)
    {
        CV_Error( CV_StsBadArg, "number of iterations cannot be zero or a negative number" );
    }

    Mat theta_p = _init_theta.clone();
    Mat gradient( theta_p.rows, theta_p.cols, theta_p.type() );
    Mat data_d;
    Mat labels_l;

    if (params.norm != REG_DISABLE)
    {
        lambda_l = 1;
    }

    for(int i = 0;i<this->params.term_crit.maxCount;i++)
    {
        if(j+size_b<=_data.rows)
        {
            data_d = _data(Range(j,j+size_b), Range::all());
            labels_l = _labels(Range(j,j+size_b),Range::all());
        }
        else
        {
            data_d = _data(Range(j, _data.rows), Range::all());
            labels_l = _labels(Range(j, _labels.rows),Range::all());
        }

        m = data_d.rows;

        // this seems to only be called to ensure that cost is not NaN
        compute_cost(data_d, labels_l, theta_p);

        compute_gradient(data_d, labels_l, theta_p, lambda_l, gradient);

        theta_p = theta_p - ( static_cast<double>(this->params.alpha)/m)*gradient;

        j += this->params.mini_batch_size;

        // wrap around once all training samples have been used
        if (j >= _data.rows) {
            j = 0;
        }
    }
    return theta_p;
}
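
// Traversal sketch (hypothetical numbers): with 10 samples and
// mini_batch_size = 4, successive iterations see rows [0,4), [4,8),
// [8,10), then wrap back to [0,4); term_crit.maxCount bounds the total
// number of mini-batch updates, not the number of epochs.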

bool LogisticRegressionImpl::set_label_map(const Mat &_labels_i)
{
    // this function creates two maps that translate between user-defined
    // labels and contiguous program-friendly labels, in both directions.
    int ii = 0;
    Mat labels;

    this->labels_o = Mat(0,1, CV_8U);
    this->labels_n = Mat(0,1, CV_8U);

    _labels_i.convertTo(labels, CV_32S);

    for(int i = 0;i<labels.rows;i++)
    {
        this->forward_mapper[labels.at<int>(i)] += 1;
    }

    for(map<int,int>::iterator it = this->forward_mapper.begin(); it != this->forward_mapper.end(); ++it)
    {
        this->forward_mapper[it->first] = ii;
        this->labels_o.push_back(it->first);
        this->labels_n.push_back(ii);
        ii += 1;
    }

    for(map<int,int>::iterator it = this->forward_mapper.begin(); it != this->forward_mapper.end(); ++it)
    {
        this->reverse_mapper[it->second] = it->first;
    }

    return true;
}
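
// Example (hypothetical labels): for responses {7, 3, 7, 9} the maps become
//
//     forward_mapper: {3 -> 0, 7 -> 1, 9 -> 2}
//     reverse_mapper: {0 -> 3, 1 -> 7, 2 -> 9}
//
// since std::map iterates its keys in sorted order.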

Mat LogisticRegressionImpl::remap_labels(const Mat& _labels_i, const map<int, int>& lmap) const
{
    Mat labels;
    _labels_i.convertTo(labels, CV_32S);

    Mat new_labels = Mat::zeros(labels.rows, labels.cols, labels.type());

    CV_Assert( !lmap.empty() );

    for(int i =0;i<labels.rows;i++)
    {
        new_labels.at<int>(i,0) = lmap.find(labels.at<int>(i,0))->second;
    }
    return new_labels;
}

void LogisticRegressionImpl::clear()
{
    this->learnt_thetas.release();
    this->labels_o.release();
    this->labels_n.release();
}

void LogisticRegressionImpl::write(FileStorage& fs) const
{
    // check if open
    if(fs.isOpened() == 0)
    {
        CV_Error(CV_StsBadArg,"file can't be opened. Check the file path");
    }
    writeFormat(fs);
    string desc = "Logistic Regression Classifier";
    fs<<"classifier"<<desc.c_str();
    fs<<"alpha"<<this->params.alpha;
    fs<<"iterations"<<this->params.num_iters;
    fs<<"norm"<<this->params.norm;
    fs<<"train_method"<<this->params.train_method;
    if(this->params.train_method == LogisticRegression::MINI_BATCH)
    {
        fs<<"mini_batch_size"<<this->params.mini_batch_size;
    }
    fs<<"learnt_thetas"<<this->learnt_thetas;
    fs<<"n_labels"<<this->labels_n;
    fs<<"o_labels"<<this->labels_o;
}
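
// Resulting storage layout, sketched for a hypothetical binary model saved
// as YAML (key names match the writes above; the values are made up):
//
//     classifier: "Logistic Regression Classifier"
//     alpha: 1.0e-03
//     iterations: 1000
//     norm: 1
//     train_method: 0
//     learnt_thetas: !!opencv-matrix { rows: 1, cols: 3, dt: f, data: [ ... ] }
//     n_labels: ...
//     o_labels: ...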

void LogisticRegressionImpl::read(const FileNode& fn)
{
    // check if empty
    if(fn.empty())
    {
        CV_Error( CV_StsBadArg, "empty FileNode object" );
    }

    this->params.alpha = (double)fn["alpha"];
    this->params.num_iters = (int)fn["iterations"];
    this->params.norm = (int)fn["norm"];
    this->params.train_method = (int)fn["train_method"];

    if(this->params.train_method == LogisticRegression::MINI_BATCH)
    {
        this->params.mini_batch_size = (int)fn["mini_batch_size"];
    }

    fn["learnt_thetas"] >> this->learnt_thetas;
    fn["o_labels"] >> this->labels_o;
    fn["n_labels"] >> this->labels_n;

    for(int ii =0;ii<labels_o.rows;ii++)
    {
        this->forward_mapper[labels_o.at<int>(ii,0)] = labels_n.at<int>(ii,0);
        this->reverse_mapper[labels_n.at<int>(ii,0)] = labels_o.at<int>(ii,0);
    }
}

}
}

/* End of file. */