Mirror of https://github.com/opencv/opencv.git (synced 2025-06-11 20:09:23 +08:00)
ml: apply CV_OVERRIDE/CV_FINAL
commit 4d0dd3e509 (parent 3314966acb)
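The change is mechanical: virtual methods in the ml module that override a base-class declaration are annotated with CV_OVERRIDE, and the internal implementation classes are sealed with CV_FINAL. These macros are thin wrappers over the C++11 `override` and `final` specifiers; the definitions below are an illustrative assumption of how OpenCV's cvdef.h provides them (they reduce to nothing on pre-C++11 compilers) and are not part of this commit.

```cpp
// Assumed (illustrative) definitions, mirroring what cvdef.h typically provides.
// CV_OVERRIDE makes the compiler reject a method that does not actually override
// a base-class virtual (e.g. a mismatched const qualifier or parameter type),
// and CV_FINAL forbids further derivation/overriding, which also makes it easier
// for the optimizer to devirtualize calls through the sealed class.
#if defined(__cplusplus) && __cplusplus >= 201103L
#  define CV_OVERRIDE override
#  define CV_FINAL   final
#else
#  define CV_OVERRIDE
#  define CV_FINAL
#endif
```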
@@ -318,7 +318,7 @@ public:
     /** @brief Returns the number of variables in training samples */
     CV_WRAP virtual int getVarCount() const = 0;

-    CV_WRAP virtual bool empty() const;
+    CV_WRAP virtual bool empty() const CV_OVERRIDE;

     /** @brief Returns true if the model is trained */
     CV_WRAP virtual bool isTrained() const = 0;
@@ -894,7 +894,7 @@ public:
     posterior probabilities for each sample from the input
     @param flags This parameter will be ignored
      */
-    CV_WRAP virtual float predict( InputArray samples, OutputArray results=noArray(), int flags=0 ) const = 0;
+    CV_WRAP virtual float predict( InputArray samples, OutputArray results=noArray(), int flags=0 ) const CV_OVERRIDE = 0;

     /** @brief Returns a likelihood logarithm value and an index of the most probable mixture component
     for the given sample.
@@ -1656,7 +1656,7 @@ public:
     @param results Predicted labels as a column matrix of type CV_32S.
     @param flags Not used.
      */
-    CV_WRAP virtual float predict( InputArray samples, OutputArray results=noArray(), int flags=0 ) const = 0;
+    CV_WRAP virtual float predict( InputArray samples, OutputArray results=noArray(), int flags=0 ) const CV_OVERRIDE = 0;

     /** @brief This function returns the trained parameters arranged across rows.
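One detail from the header hunks above is the combination `const CV_OVERRIDE = 0`: a re-declared method can be marked as an override of the base interface while still being left pure virtual for concrete subclasses. A minimal, self-contained sketch of the pattern follows; the class names are hypothetical stand-ins, not the real ml classes, and the macro definitions are assumed as above.

```cpp
// Sketch only: assumes CV_OVERRIDE/CV_FINAL expand to override/final as in cvdef.h.
#ifndef CV_OVERRIDE
#define CV_OVERRIDE override
#endif
#ifndef CV_FINAL
#define CV_FINAL final
#endif

struct StatModelLike                       // stand-in for the base interface
{
    virtual ~StatModelLike() {}
    virtual float predict(float sample, int flags = 0) const = 0;
};

struct EMLike : public StatModelLike       // intermediate interface
{
    // Still pure virtual, but now checked against the base declaration:
    virtual float predict(float sample, int flags = 0) const CV_OVERRIDE = 0;
};

class EMImplLike CV_FINAL : public EMLike  // concrete, sealed implementation
{
public:
    float predict(float sample, int) const CV_OVERRIDE { return sample * 0.5f; }
};
```

Calling `predict` through a `StatModelLike*` still dispatches to `EMImplLike::predict`; the benefit is that any signature drift in a subclass becomes a compile-time error instead of silently introducing a separate virtual.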
@@ -213,7 +213,7 @@ void ANN_MLP::setAnnealEnergyRNG(const RNG& rng)
     this_->setAnnealEnergyRNG(rng);
 }

-class ANN_MLPImpl : public ANN_MLP_ANNEAL
+class ANN_MLPImpl CV_FINAL : public ANN_MLP_ANNEAL
 {
 public:
     ANN_MLPImpl()
@@ -226,23 +226,34 @@ public:

     virtual ~ANN_MLPImpl() {}

-    CV_IMPL_PROPERTY(TermCriteria, TermCriteria, params.termCrit)
-    CV_IMPL_PROPERTY(double, BackpropWeightScale, params.bpDWScale)
-    CV_IMPL_PROPERTY(double, BackpropMomentumScale, params.bpMomentScale)
-    CV_IMPL_PROPERTY(double, RpropDW0, params.rpDW0)
-    CV_IMPL_PROPERTY(double, RpropDWPlus, params.rpDWPlus)
-    CV_IMPL_PROPERTY(double, RpropDWMinus, params.rpDWMinus)
-    CV_IMPL_PROPERTY(double, RpropDWMin, params.rpDWMin)
-    CV_IMPL_PROPERTY(double, RpropDWMax, params.rpDWMax)
-    CV_IMPL_PROPERTY(double, AnnealInitialT, params.initialT)
-    CV_IMPL_PROPERTY(double, AnnealFinalT, params.finalT)
-    CV_IMPL_PROPERTY(double, AnnealCoolingRatio, params.coolingRatio)
-    CV_IMPL_PROPERTY(int, AnnealItePerStep, params.itePerStep)
-
-    //CV_IMPL_PROPERTY(RNG, AnnealEnergyRNG, params.rEnergy)
-    inline void setAnnealEnergyRNG(const RNG& val) { params.rEnergy = val; }
+    inline TermCriteria getTermCriteria() const CV_OVERRIDE { return params.termCrit; }
+    inline void setTermCriteria(TermCriteria val) CV_OVERRIDE { params.termCrit = val; }
+    inline double getBackpropWeightScale() const CV_OVERRIDE { return params.bpDWScale; }
+    inline void setBackpropWeightScale(double val) CV_OVERRIDE { params.bpDWScale = val; }
+    inline double getBackpropMomentumScale() const CV_OVERRIDE { return params.bpMomentScale; }
+    inline void setBackpropMomentumScale(double val) CV_OVERRIDE { params.bpMomentScale = val; }
+    inline double getRpropDW0() const CV_OVERRIDE { return params.rpDW0; }
+    inline void setRpropDW0(double val) CV_OVERRIDE { params.rpDW0 = val; }
+    inline double getRpropDWPlus() const CV_OVERRIDE { return params.rpDWPlus; }
+    inline void setRpropDWPlus(double val) CV_OVERRIDE { params.rpDWPlus = val; }
+    inline double getRpropDWMinus() const CV_OVERRIDE { return params.rpDWMinus; }
+    inline void setRpropDWMinus(double val) CV_OVERRIDE { params.rpDWMinus = val; }
+    inline double getRpropDWMin() const CV_OVERRIDE { return params.rpDWMin; }
+    inline void setRpropDWMin(double val) CV_OVERRIDE { params.rpDWMin = val; }
+    inline double getRpropDWMax() const CV_OVERRIDE { return params.rpDWMax; }
+    inline void setRpropDWMax(double val) CV_OVERRIDE { params.rpDWMax = val; }
+    inline double getAnnealInitialT() const CV_OVERRIDE { return params.initialT; }
+    inline void setAnnealInitialT(double val) CV_OVERRIDE { params.initialT = val; }
+    inline double getAnnealFinalT() const CV_OVERRIDE { return params.finalT; }
+    inline void setAnnealFinalT(double val) CV_OVERRIDE { params.finalT = val; }
+    inline double getAnnealCoolingRatio() const CV_OVERRIDE { return params.coolingRatio; }
+    inline void setAnnealCoolingRatio(double val) CV_OVERRIDE { params.coolingRatio = val; }
+    inline int getAnnealItePerStep() const CV_OVERRIDE { return params.itePerStep; }
+    inline void setAnnealItePerStep(int val) CV_OVERRIDE { params.itePerStep = val; }
+    // disabled getAnnealEnergyRNG()
+    inline void setAnnealEnergyRNG(const RNG& val) CV_OVERRIDE { params.rEnergy = val; }

-    void clear()
+    void clear() CV_OVERRIDE
     {
         min_val = max_val = min_val1 = max_val1 = 0.;
         rng = RNG((uint64)-1);
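The bulkiest part of this hunk replaces the CV_IMPL_PROPERTY macros with hand-written inline getters and setters. A likely motivation (an inference, not stated in the commit message) is that the macro generates the accessor pair without a CV_OVERRIDE specifier, so spelling the accessors out is the straightforward way to annotate them. A reduced, hypothetical illustration of the two equivalent forms:

```cpp
// Hypothetical sketch; Params, ANN_MLP_Iface and ANN_MLP_Sketch are illustrative names.
#ifndef CV_OVERRIDE
#define CV_OVERRIDE override
#endif

struct Params { double bpDWScale; Params() : bpDWScale(0.1) {} };

struct ANN_MLP_Iface
{
    virtual ~ANN_MLP_Iface() {}
    virtual double getBackpropWeightScale() const = 0;
    virtual void setBackpropWeightScale(double val) = 0;
};

class ANN_MLP_Sketch : public ANN_MLP_Iface
{
public:
    // Roughly what CV_IMPL_PROPERTY(double, BackpropWeightScale, params.bpDWScale)
    // used to generate, now written out so each accessor can carry CV_OVERRIDE:
    inline double getBackpropWeightScale() const CV_OVERRIDE { return params.bpDWScale; }
    inline void setBackpropWeightScale(double val) CV_OVERRIDE { params.bpDWScale = val; }
private:
    Params params;
};
```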
@@ -253,7 +264,7 @@ public:

     int layer_count() const { return (int)layer_sizes.size(); }

-    void setTrainMethod(int method, double param1, double param2)
+    void setTrainMethod(int method, double param1, double param2) CV_OVERRIDE
     {
         if (method != ANN_MLP::RPROP && method != ANN_MLP::BACKPROP && method != ANN_MLP::ANNEAL)
             method = ANN_MLP::RPROP;
@@ -276,12 +287,12 @@ public:
         }
     }

-    int getTrainMethod() const
+    int getTrainMethod() const CV_OVERRIDE
     {
         return params.trainMethod;
     }

-    void setActivationFunction(int _activ_func, double _f_param1, double _f_param2)
+    void setActivationFunction(int _activ_func, double _f_param1, double _f_param2) CV_OVERRIDE
     {
         if( _activ_func < 0 || _activ_func > LEAKYRELU)
             CV_Error( CV_StsOutOfRange, "Unknown activation function" );
@@ -362,12 +373,12 @@ public:
         }
     }

-    Mat getLayerSizes() const
+    Mat getLayerSizes() const CV_OVERRIDE
     {
         return Mat_<int>(layer_sizes, true);
     }

-    void setLayerSizes( InputArray _layer_sizes )
+    void setLayerSizes( InputArray _layer_sizes ) CV_OVERRIDE
     {
         clear();

@@ -399,7 +410,7 @@ public:
         }
     }

-    float predict( InputArray _inputs, OutputArray _outputs, int ) const
+    float predict( InputArray _inputs, OutputArray _outputs, int ) const CV_OVERRIDE
     {
         if( !trained )
             CV_Error( CV_StsError, "The network has not been trained or loaded" );
@@ -907,7 +918,7 @@ public:
         calc_output_scale( outputs, flags );
     }

-    bool train( const Ptr<TrainData>& trainData, int flags )
+    bool train( const Ptr<TrainData>& trainData, int flags ) CV_OVERRIDE
     {
         const int MAX_ITER = 1000;
         const double DEFAULT_EPSILON = FLT_EPSILON;
@@ -1108,7 +1119,7 @@ public:
     int dcount0;
     double* pE;

-    void operator()( const Range& range ) const
+    void operator()(const Range& range) const CV_OVERRIDE
     {
         double inv_count = 1./inputs.rows;
         int ivcount = ann->layer_sizes.front();
@@ -1392,7 +1403,7 @@ public:
         fs << "}" << "}";
     }

-    void write( FileStorage& fs ) const
+    void write( FileStorage& fs ) const CV_OVERRIDE
     {
         if( layer_sizes.empty() )
             return;
@@ -1502,7 +1513,7 @@ public:
         }
     }

-    void read( const FileNode& fn )
+    void read( const FileNode& fn ) CV_OVERRIDE
     {
         clear();

@@ -1531,28 +1542,28 @@ public:
         trained = true;
     }

-    Mat getWeights(int layerIdx) const
+    Mat getWeights(int layerIdx) const CV_OVERRIDE
     {
         CV_Assert( 0 <= layerIdx && layerIdx < (int)weights.size() );
         return weights[layerIdx];
     }

-    bool isTrained() const
+    bool isTrained() const CV_OVERRIDE
     {
         return trained;
     }

-    bool isClassifier() const
+    bool isClassifier() const CV_OVERRIDE
     {
         return false;
     }

-    int getVarCount() const
+    int getVarCount() const CV_OVERRIDE
     {
         return layer_sizes.empty() ? 0 : layer_sizes[0];
     }

-    String getDefaultName() const
+    String getDefaultName() const CV_OVERRIDE
     {
         return "opencv_ml_ann_mlp";
     }
@@ -69,7 +69,7 @@ BoostTreeParams::BoostTreeParams( int _boostType, int _weak_count,
     weightTrimRate = _weightTrimRate;
 }

-class DTreesImplForBoost : public DTreesImpl
+class DTreesImplForBoost CV_FINAL : public DTreesImpl
 {
 public:
     DTreesImplForBoost()
@@ -79,14 +79,14 @@ public:
     }
     virtual ~DTreesImplForBoost() {}

-    bool isClassifier() const { return true; }
+    bool isClassifier() const CV_OVERRIDE { return true; }

-    void clear()
+    void clear() CV_OVERRIDE
     {
         DTreesImpl::clear();
     }

-    void startTraining( const Ptr<TrainData>& trainData, int flags )
+    void startTraining( const Ptr<TrainData>& trainData, int flags ) CV_OVERRIDE
     {
         DTreesImpl::startTraining(trainData, flags);
         sumResult.assign(w->sidx.size(), 0.);
@@ -132,7 +132,7 @@ public:
         }
     }

-    void endTraining()
+    void endTraining() CV_OVERRIDE
     {
         DTreesImpl::endTraining();
         vector<double> e;
@@ -167,7 +167,7 @@ public:
         }
     }

-    void calcValue( int nidx, const vector<int>& _sidx )
+    void calcValue( int nidx, const vector<int>& _sidx ) CV_OVERRIDE
     {
         DTreesImpl::calcValue(nidx, _sidx);
         WNode* node = &w->wnodes[nidx];
@@ -182,7 +182,7 @@ public:
         }
     }

-    bool train( const Ptr<TrainData>& trainData, int flags )
+    bool train( const Ptr<TrainData>& trainData, int flags ) CV_OVERRIDE
     {
         startTraining(trainData, flags);
         int treeidx, ntrees = bparams.weakCount >= 0 ? bparams.weakCount : 10000;
@@ -356,7 +356,7 @@ public:
         }
     }

-    float predictTrees( const Range& range, const Mat& sample, int flags0 ) const
+    float predictTrees( const Range& range, const Mat& sample, int flags0 ) const CV_OVERRIDE
     {
         int flags = (flags0 & ~PREDICT_MASK) | PREDICT_SUM;
         float val = DTreesImpl::predictTrees(range, sample, flags);
@@ -370,7 +370,7 @@ public:
         return val;
     }

-    void writeTrainingParams( FileStorage& fs ) const
+    void writeTrainingParams( FileStorage& fs ) const CV_OVERRIDE
     {
         fs << "boosting_type" <<
         (bparams.boostType == Boost::DISCRETE ? "DiscreteAdaboost" :
@@ -382,7 +382,7 @@ public:
         fs << "weight_trimming_rate" << bparams.weightTrimRate;
     }

-    void write( FileStorage& fs ) const
+    void write( FileStorage& fs ) const CV_OVERRIDE
     {
         if( roots.empty() )
             CV_Error( CV_StsBadArg, "RTrees have not been trained" );
@@ -405,7 +405,7 @@ public:
         fs << "]";
     }

-    void readParams( const FileNode& fn )
+    void readParams( const FileNode& fn ) CV_OVERRIDE
     {
         DTreesImpl::readParams(fn);

@@ -423,7 +423,7 @@ public:
         tparams_node["weight_trimming_rate"] : fn["weight_trimming_rate"]);
     }

-    void read( const FileNode& fn )
+    void read( const FileNode& fn ) CV_OVERRIDE
     {
         clear();

@@ -452,51 +452,63 @@ public:
     BoostImpl() {}
     virtual ~BoostImpl() {}

-    CV_IMPL_PROPERTY(int, BoostType, impl.bparams.boostType)
-    CV_IMPL_PROPERTY(int, WeakCount, impl.bparams.weakCount)
-    CV_IMPL_PROPERTY(double, WeightTrimRate, impl.bparams.weightTrimRate)
+    inline int getBoostType() const CV_OVERRIDE { return impl.bparams.boostType; }
+    inline void setBoostType(int val) CV_OVERRIDE { impl.bparams.boostType = val; }
+    inline int getWeakCount() const CV_OVERRIDE { return impl.bparams.weakCount; }
+    inline void setWeakCount(int val) CV_OVERRIDE { impl.bparams.weakCount = val; }
+    inline double getWeightTrimRate() const CV_OVERRIDE { return impl.bparams.weightTrimRate; }
+    inline void setWeightTrimRate(double val) CV_OVERRIDE { impl.bparams.weightTrimRate = val; }

-    CV_WRAP_SAME_PROPERTY(int, MaxCategories, impl.params)
-    CV_WRAP_SAME_PROPERTY(int, MaxDepth, impl.params)
-    CV_WRAP_SAME_PROPERTY(int, MinSampleCount, impl.params)
-    CV_WRAP_SAME_PROPERTY(int, CVFolds, impl.params)
-    CV_WRAP_SAME_PROPERTY(bool, UseSurrogates, impl.params)
-    CV_WRAP_SAME_PROPERTY(bool, Use1SERule, impl.params)
-    CV_WRAP_SAME_PROPERTY(bool, TruncatePrunedTree, impl.params)
-    CV_WRAP_SAME_PROPERTY(float, RegressionAccuracy, impl.params)
-    CV_WRAP_SAME_PROPERTY_S(cv::Mat, Priors, impl.params)
+    inline int getMaxCategories() const CV_OVERRIDE { return impl.params.getMaxCategories(); }
+    inline void setMaxCategories(int val) CV_OVERRIDE { impl.params.setMaxCategories(val); }
+    inline int getMaxDepth() const CV_OVERRIDE { return impl.params.getMaxDepth(); }
+    inline void setMaxDepth(int val) CV_OVERRIDE { impl.params.setMaxDepth(val); }
+    inline int getMinSampleCount() const CV_OVERRIDE { return impl.params.getMinSampleCount(); }
+    inline void setMinSampleCount(int val) CV_OVERRIDE { impl.params.setMinSampleCount(val); }
+    inline int getCVFolds() const CV_OVERRIDE { return impl.params.getCVFolds(); }
+    inline void setCVFolds(int val) CV_OVERRIDE { impl.params.setCVFolds(val); }
+    inline bool getUseSurrogates() const CV_OVERRIDE { return impl.params.getUseSurrogates(); }
+    inline void setUseSurrogates(bool val) CV_OVERRIDE { impl.params.setUseSurrogates(val); }
+    inline bool getUse1SERule() const CV_OVERRIDE { return impl.params.getUse1SERule(); }
+    inline void setUse1SERule(bool val) CV_OVERRIDE { impl.params.setUse1SERule(val); }
+    inline bool getTruncatePrunedTree() const CV_OVERRIDE { return impl.params.getTruncatePrunedTree(); }
+    inline void setTruncatePrunedTree(bool val) CV_OVERRIDE { impl.params.setTruncatePrunedTree(val); }
+    inline float getRegressionAccuracy() const CV_OVERRIDE { return impl.params.getRegressionAccuracy(); }
+    inline void setRegressionAccuracy(float val) CV_OVERRIDE { impl.params.setRegressionAccuracy(val); }
+    inline cv::Mat getPriors() const CV_OVERRIDE { return impl.params.getPriors(); }
+    inline void setPriors(const cv::Mat& val) CV_OVERRIDE { impl.params.setPriors(val); }

-    String getDefaultName() const { return "opencv_ml_boost"; }
+    String getDefaultName() const CV_OVERRIDE { return "opencv_ml_boost"; }

-    bool train( const Ptr<TrainData>& trainData, int flags )
+    bool train( const Ptr<TrainData>& trainData, int flags ) CV_OVERRIDE
     {
         return impl.train(trainData, flags);
     }

-    float predict( InputArray samples, OutputArray results, int flags ) const
+    float predict( InputArray samples, OutputArray results, int flags ) const CV_OVERRIDE
     {
         return impl.predict(samples, results, flags);
     }

-    void write( FileStorage& fs ) const
+    void write( FileStorage& fs ) const CV_OVERRIDE
     {
         impl.write(fs);
     }

-    void read( const FileNode& fn )
+    void read( const FileNode& fn ) CV_OVERRIDE
     {
         impl.read(fn);
     }

-    int getVarCount() const { return impl.getVarCount(); }
+    int getVarCount() const CV_OVERRIDE { return impl.getVarCount(); }

-    bool isTrained() const { return impl.isTrained(); }
-    bool isClassifier() const { return impl.isClassifier(); }
+    bool isTrained() const CV_OVERRIDE { return impl.isTrained(); }
+    bool isClassifier() const CV_OVERRIDE { return impl.isClassifier(); }

-    const vector<int>& getRoots() const { return impl.getRoots(); }
-    const vector<Node>& getNodes() const { return impl.getNodes(); }
-    const vector<Split>& getSplits() const { return impl.getSplits(); }
-    const vector<int>& getSubsets() const { return impl.getSubsets(); }
+    const vector<int>& getRoots() const CV_OVERRIDE { return impl.getRoots(); }
+    const vector<Node>& getNodes() const CV_OVERRIDE { return impl.getNodes(); }
+    const vector<Split>& getSplits() const CV_OVERRIDE { return impl.getSplits(); }
+    const vector<int>& getSubsets() const CV_OVERRIDE { return impl.getSubsets(); }

     DTreesImplForBoost impl;
 };
@@ -119,7 +119,7 @@ Mat TrainData::getSubVector(const Mat& vec, const Mat& idx)
     return subvec;
 }

-class TrainDataImpl : public TrainData
+class TrainDataImpl CV_FINAL : public TrainData
 {
 public:
     typedef std::map<String, int> MapType;
@@ -132,75 +132,75 @@ public:

     virtual ~TrainDataImpl() { closeFile(); }

-    int getLayout() const { return layout; }
-    int getNSamples() const
+    int getLayout() const CV_OVERRIDE { return layout; }
+    int getNSamples() const CV_OVERRIDE
     {
         return !sampleIdx.empty() ? (int)sampleIdx.total() :
             layout == ROW_SAMPLE ? samples.rows : samples.cols;
     }
-    int getNTrainSamples() const
+    int getNTrainSamples() const CV_OVERRIDE
     {
         return !trainSampleIdx.empty() ? (int)trainSampleIdx.total() : getNSamples();
     }
-    int getNTestSamples() const
+    int getNTestSamples() const CV_OVERRIDE
     {
         return !testSampleIdx.empty() ? (int)testSampleIdx.total() : 0;
     }
-    int getNVars() const
+    int getNVars() const CV_OVERRIDE
     {
         return !varIdx.empty() ? (int)varIdx.total() : getNAllVars();
     }
-    int getNAllVars() const
+    int getNAllVars() const CV_OVERRIDE
     {
         return layout == ROW_SAMPLE ? samples.cols : samples.rows;
     }

-    Mat getSamples() const { return samples; }
-    Mat getResponses() const { return responses; }
-    Mat getMissing() const { return missing; }
-    Mat getVarIdx() const { return varIdx; }
-    Mat getVarType() const { return varType; }
-    int getResponseType() const
+    Mat getSamples() const CV_OVERRIDE { return samples; }
+    Mat getResponses() const CV_OVERRIDE { return responses; }
+    Mat getMissing() const CV_OVERRIDE { return missing; }
+    Mat getVarIdx() const CV_OVERRIDE { return varIdx; }
+    Mat getVarType() const CV_OVERRIDE { return varType; }
+    int getResponseType() const CV_OVERRIDE
     {
         return classLabels.empty() ? VAR_ORDERED : VAR_CATEGORICAL;
     }
-    Mat getTrainSampleIdx() const { return !trainSampleIdx.empty() ? trainSampleIdx : sampleIdx; }
-    Mat getTestSampleIdx() const { return testSampleIdx; }
-    Mat getSampleWeights() const
+    Mat getTrainSampleIdx() const CV_OVERRIDE { return !trainSampleIdx.empty() ? trainSampleIdx : sampleIdx; }
+    Mat getTestSampleIdx() const CV_OVERRIDE { return testSampleIdx; }
+    Mat getSampleWeights() const CV_OVERRIDE
     {
         return sampleWeights;
     }
-    Mat getTrainSampleWeights() const
+    Mat getTrainSampleWeights() const CV_OVERRIDE
     {
         return getSubVector(sampleWeights, getTrainSampleIdx());
     }
-    Mat getTestSampleWeights() const
+    Mat getTestSampleWeights() const CV_OVERRIDE
     {
         Mat idx = getTestSampleIdx();
         return idx.empty() ? Mat() : getSubVector(sampleWeights, idx);
     }
-    Mat getTrainResponses() const
+    Mat getTrainResponses() const CV_OVERRIDE
     {
         return getSubVector(responses, getTrainSampleIdx());
     }
-    Mat getTrainNormCatResponses() const
+    Mat getTrainNormCatResponses() const CV_OVERRIDE
     {
         return getSubVector(normCatResponses, getTrainSampleIdx());
     }
-    Mat getTestResponses() const
+    Mat getTestResponses() const CV_OVERRIDE
     {
         Mat idx = getTestSampleIdx();
         return idx.empty() ? Mat() : getSubVector(responses, idx);
     }
-    Mat getTestNormCatResponses() const
+    Mat getTestNormCatResponses() const CV_OVERRIDE
     {
         Mat idx = getTestSampleIdx();
         return idx.empty() ? Mat() : getSubVector(normCatResponses, idx);
     }
-    Mat getNormCatResponses() const { return normCatResponses; }
-    Mat getClassLabels() const { return classLabels; }
+    Mat getNormCatResponses() const CV_OVERRIDE { return normCatResponses; }
+    Mat getClassLabels() const CV_OVERRIDE { return classLabels; }
     Mat getClassCounters() const { return classCounters; }
-    int getCatCount(int vi) const
+    int getCatCount(int vi) const CV_OVERRIDE
     {
         int n = (int)catOfs.total();
         CV_Assert( 0 <= vi && vi < n );
@@ -208,10 +208,10 @@ public:
         return ofs[1] - ofs[0];
     }

-    Mat getCatOfs() const { return catOfs; }
-    Mat getCatMap() const { return catMap; }
+    Mat getCatOfs() const CV_OVERRIDE { return catOfs; }
+    Mat getCatMap() const CV_OVERRIDE { return catMap; }

-    Mat getDefaultSubstValues() const { return missingSubst; }
+    Mat getDefaultSubstValues() const CV_OVERRIDE { return missingSubst; }

     void closeFile() { if(file) fclose(file); file=0; }
     void clear()
@@ -767,13 +767,13 @@ public:
             CV_Error( CV_StsBadArg, "type of some variables is not specified" );
     }

-    void setTrainTestSplitRatio(double ratio, bool shuffle)
+    void setTrainTestSplitRatio(double ratio, bool shuffle) CV_OVERRIDE
     {
         CV_Assert( 0. <= ratio && ratio <= 1. );
         setTrainTestSplit(cvRound(getNSamples()*ratio), shuffle);
     }

-    void setTrainTestSplit(int count, bool shuffle)
+    void setTrainTestSplit(int count, bool shuffle) CV_OVERRIDE
     {
         int i, nsamples = getNSamples();
         CV_Assert( 0 <= count && count < nsamples );
@@ -810,7 +810,7 @@ public:
         }
     }

-    void shuffleTrainTest()
+    void shuffleTrainTest() CV_OVERRIDE
     {
         if( !trainSampleIdx.empty() && !testSampleIdx.empty() )
         {
@@ -844,7 +844,7 @@ public:

     Mat getTrainSamples(int _layout,
                         bool compressSamples,
-                        bool compressVars) const
+                        bool compressVars) const CV_OVERRIDE
     {
         if( samples.empty() )
             return samples;
@@ -884,7 +884,7 @@ public:
         return dsamples;
     }

-    void getValues( int vi, InputArray _sidx, float* values ) const
+    void getValues( int vi, InputArray _sidx, float* values ) const CV_OVERRIDE
     {
         Mat sidx = _sidx.getMat();
         int i, n = sidx.checkVector(1, CV_32S), nsamples = getNSamples();
@@ -914,7 +914,7 @@ public:
         }
     }

-    void getNormCatValues( int vi, InputArray _sidx, int* values ) const
+    void getNormCatValues( int vi, InputArray _sidx, int* values ) const CV_OVERRIDE
     {
         float* fvalues = (float*)values;
         getValues(vi, _sidx, fvalues);
@@ -960,7 +960,7 @@ public:
         }
     }

-    void getSample(InputArray _vidx, int sidx, float* buf) const
+    void getSample(InputArray _vidx, int sidx, float* buf) const CV_OVERRIDE
     {
         CV_Assert(buf != 0 && 0 <= sidx && sidx < getNSamples());
         Mat vidx = _vidx.getMat();
@@ -48,7 +48,7 @@ namespace ml

 const double minEigenValue = DBL_EPSILON;

-class CV_EXPORTS EMImpl : public EM
+class CV_EXPORTS EMImpl CV_FINAL : public EM
 {
 public:

@@ -56,20 +56,21 @@ public:
     int covMatType;
     TermCriteria termCrit;

-    CV_IMPL_PROPERTY_S(TermCriteria, TermCriteria, termCrit)
+    inline TermCriteria getTermCriteria() const CV_OVERRIDE { return termCrit; }
+    inline void setTermCriteria(const TermCriteria& val) CV_OVERRIDE { termCrit = val; }

-    void setClustersNumber(int val)
+    void setClustersNumber(int val) CV_OVERRIDE
     {
         nclusters = val;
         CV_Assert(nclusters >= 1);
     }

-    int getClustersNumber() const
+    int getClustersNumber() const CV_OVERRIDE
     {
         return nclusters;
     }

-    void setCovarianceMatrixType(int val)
+    void setCovarianceMatrixType(int val) CV_OVERRIDE
     {
         covMatType = val;
         CV_Assert(covMatType == COV_MAT_SPHERICAL ||
@@ -77,7 +78,7 @@ public:
                   covMatType == COV_MAT_GENERIC);
     }

-    int getCovarianceMatrixType() const
+    int getCovarianceMatrixType() const CV_OVERRIDE
     {
         return covMatType;
     }
@@ -91,7 +92,7 @@ public:

     virtual ~EMImpl() {}

-    void clear()
+    void clear() CV_OVERRIDE
     {
         trainSamples.release();
         trainProbs.release();
@@ -109,7 +110,7 @@ public:
         logWeightDivDet.release();
     }

-    bool train(const Ptr<TrainData>& data, int)
+    bool train(const Ptr<TrainData>& data, int) CV_OVERRIDE
     {
         Mat samples = data->getTrainSamples(), labels;
         return trainEM(samples, labels, noArray(), noArray());
@@ -118,7 +119,7 @@ public:
     bool trainEM(InputArray samples,
                  OutputArray logLikelihoods,
                  OutputArray labels,
-                 OutputArray probs)
+                 OutputArray probs) CV_OVERRIDE
     {
         Mat samplesMat = samples.getMat();
         setTrainData(START_AUTO_STEP, samplesMat, 0, 0, 0, 0);
@@ -131,7 +132,7 @@ public:
                 InputArray _weights0,
                 OutputArray logLikelihoods,
                 OutputArray labels,
-                OutputArray probs)
+                OutputArray probs) CV_OVERRIDE
     {
         Mat samplesMat = samples.getMat();
         std::vector<Mat> covs0;
@@ -148,7 +149,7 @@ public:
                 InputArray _probs0,
                 OutputArray logLikelihoods,
                 OutputArray labels,
-                OutputArray probs)
+                OutputArray probs) CV_OVERRIDE
     {
         Mat samplesMat = samples.getMat();
         Mat probs0 = _probs0.getMat();
@@ -157,7 +158,7 @@ public:
         return doTrain(START_M_STEP, logLikelihoods, labels, probs);
     }

-    float predict(InputArray _inputs, OutputArray _outputs, int) const
+    float predict(InputArray _inputs, OutputArray _outputs, int) const CV_OVERRIDE
     {
         bool needprobs = _outputs.needed();
         Mat samples = _inputs.getMat(), probs, probsrow;
@@ -186,7 +187,7 @@ public:
         return firstres;
     }

-    Vec2d predict2(InputArray _sample, OutputArray _probs) const
+    Vec2d predict2(InputArray _sample, OutputArray _probs) const CV_OVERRIDE
     {
         int ptype = CV_64F;
         Mat sample = _sample.getMat();
@@ -213,22 +214,22 @@ public:
         return computeProbabilities(sample, !probs.empty() ? &probs : 0, ptype);
     }

-    bool isTrained() const
+    bool isTrained() const CV_OVERRIDE
     {
         return !means.empty();
     }

-    bool isClassifier() const
+    bool isClassifier() const CV_OVERRIDE
     {
         return true;
     }

-    int getVarCount() const
+    int getVarCount() const CV_OVERRIDE
     {
         return means.cols;
     }

-    String getDefaultName() const
+    String getDefaultName() const CV_OVERRIDE
     {
         return "opencv_ml_em";
     }
@@ -768,7 +769,7 @@ public:
         writeTermCrit(fs, termCrit);
     }

-    void write(FileStorage& fs) const
+    void write(FileStorage& fs) const CV_OVERRIDE
     {
         writeFormat(fs);
         fs << "training_params" << "{";
@@ -796,7 +797,7 @@ public:
         termCrit = readTermCrit(fn);
     }

-    void read(const FileNode& fn)
+    void read(const FileNode& fn) CV_OVERRIDE
     {
         clear();
         read_params(fn["training_params"]);
@@ -816,9 +817,9 @@ public:
         computeLogWeightDivDet();
     }

-    Mat getWeights() const { return weights; }
-    Mat getMeans() const { return means; }
-    void getCovs(std::vector<Mat>& _covs) const
+    Mat getWeights() const CV_OVERRIDE { return weights; }
+    Mat getMeans() const CV_OVERRIDE { return means; }
+    void getCovs(std::vector<Mat>& _covs) const CV_OVERRIDE
     {
         _covs.resize(covs.size());
         std::copy(covs.begin(), covs.end(), _covs.begin());
@@ -89,7 +89,7 @@ public:
         errStrip(e)
     {
     }
-    virtual void operator()(const Range& range) const
+    virtual void operator()(const Range& range) const CV_OVERRIDE
     {
         int idxErr = range.start;
         CV_TRACE_FUNCTION_SKIP_NESTED();
@@ -134,11 +134,11 @@ public:
     Mat responses;
 };

-class BruteForceImpl : public Impl
+class BruteForceImpl CV_FINAL : public Impl
 {
 public:
-    String getModelName() const { return NAME_BRUTE_FORCE; }
-    int getType() const { return ml::KNearest::BRUTE_FORCE; }
+    String getModelName() const CV_OVERRIDE { return NAME_BRUTE_FORCE; }
+    int getType() const CV_OVERRIDE { return ml::KNearest::BRUTE_FORCE; }

     void findNearestCore( const Mat& _samples, int k0, const Range& range,
                           Mat* results, Mat* neighbor_responses,
@@ -294,7 +294,7 @@ public:
             presult = _presult;
         }

-        void operator()( const Range& range ) const
+        void operator()(const Range& range) const CV_OVERRIDE
         {
             int delta = std::min(range.end - range.start, 256);
             for( int start = range.start; start < range.end; start += delta )
@@ -316,7 +316,7 @@ public:
     float findNearest( InputArray _samples, int k,
                        OutputArray _results,
                        OutputArray _neighborResponses,
-                       OutputArray _dists ) const
+                       OutputArray _dists ) const CV_OVERRIDE
     {
         float result = 0.f;
         CV_Assert( 0 < k );
@@ -358,13 +358,13 @@ public:
 };


-class KDTreeImpl : public Impl
+class KDTreeImpl CV_FINAL : public Impl
 {
 public:
-    String getModelName() const { return NAME_KDTREE; }
-    int getType() const { return ml::KNearest::KDTREE; }
+    String getModelName() const CV_OVERRIDE { return NAME_KDTREE; }
+    int getType() const CV_OVERRIDE { return ml::KNearest::KDTREE; }

-    void doTrain(InputArray points)
+    void doTrain(InputArray points) CV_OVERRIDE
     {
         tr.build(points);
     }
@@ -372,7 +372,7 @@ public:
     float findNearest( InputArray _samples, int k,
                        OutputArray _results,
                        OutputArray _neighborResponses,
-                       OutputArray _dists ) const
+                       OutputArray _dists ) const CV_OVERRIDE
     {
         float result = 0.f;
         CV_Assert( 0 < k );
@@ -432,18 +432,21 @@ public:

 //================================================================

-class KNearestImpl : public KNearest
+class KNearestImpl CV_FINAL : public KNearest
 {
-    CV_IMPL_PROPERTY(int, DefaultK, impl->defaultK)
-    CV_IMPL_PROPERTY(bool, IsClassifier, impl->isclassifier)
-    CV_IMPL_PROPERTY(int, Emax, impl->Emax)
+    inline int getDefaultK() const CV_OVERRIDE { return impl->defaultK; }
+    inline void setDefaultK(int val) CV_OVERRIDE { impl->defaultK = val; }
+    inline bool getIsClassifier() const CV_OVERRIDE { return impl->isclassifier; }
+    inline void setIsClassifier(bool val) CV_OVERRIDE { impl->isclassifier = val; }
+    inline int getEmax() const CV_OVERRIDE { return impl->Emax; }
+    inline void setEmax(int val) CV_OVERRIDE { impl->Emax = val; }

 public:
-    int getAlgorithmType() const
+    int getAlgorithmType() const CV_OVERRIDE
     {
         return impl->getType();
     }
-    void setAlgorithmType(int val)
+    void setAlgorithmType(int val) CV_OVERRIDE
     {
         if (val != BRUTE_FORCE && val != KDTREE)
             val = BRUTE_FORCE;
@@ -459,18 +462,18 @@ public:
     {
     }

-    bool isClassifier() const { return impl->isclassifier; }
-    bool isTrained() const { return !impl->samples.empty(); }
+    bool isClassifier() const CV_OVERRIDE { return impl->isclassifier; }
+    bool isTrained() const CV_OVERRIDE { return !impl->samples.empty(); }

-    int getVarCount() const { return impl->samples.cols; }
+    int getVarCount() const CV_OVERRIDE { return impl->samples.cols; }

-    void write( FileStorage& fs ) const
+    void write( FileStorage& fs ) const CV_OVERRIDE
     {
         writeFormat(fs);
         impl->write(fs);
     }

-    void read( const FileNode& fn )
+    void read( const FileNode& fn ) CV_OVERRIDE
     {
         int algorithmType = BRUTE_FORCE;
         if (fn.name() == NAME_KDTREE)
@@ -482,22 +485,22 @@ public:
     float findNearest( InputArray samples, int k,
                        OutputArray results,
                        OutputArray neighborResponses=noArray(),
-                       OutputArray dist=noArray() ) const
+                       OutputArray dist=noArray() ) const CV_OVERRIDE
     {
         return impl->findNearest(samples, k, results, neighborResponses, dist);
     }

-    float predict(InputArray inputs, OutputArray outputs, int) const
+    float predict(InputArray inputs, OutputArray outputs, int) const CV_OVERRIDE
     {
         return impl->findNearest( inputs, impl->defaultK, outputs, noArray(), noArray() );
     }

-    bool train( const Ptr<TrainData>& data, int flags )
+    bool train( const Ptr<TrainData>& data, int flags ) CV_OVERRIDE
     {
         return impl->train(data, flags);
     }

-    String getDefaultName() const { return impl->getModelName(); }
+    String getDefaultName() const CV_OVERRIDE { return impl->getModelName(); }

 protected:
     void initImpl(int algorithmType)
@@ -81,30 +81,36 @@ public:
     TermCriteria term_crit;
 };

-class LogisticRegressionImpl : public LogisticRegression
+class LogisticRegressionImpl CV_FINAL : public LogisticRegression
 {
 public:

     LogisticRegressionImpl() { }
     virtual ~LogisticRegressionImpl() {}

-    CV_IMPL_PROPERTY(double, LearningRate, params.alpha)
-    CV_IMPL_PROPERTY(int, Iterations, params.num_iters)
-    CV_IMPL_PROPERTY(int, Regularization, params.norm)
-    CV_IMPL_PROPERTY(int, TrainMethod, params.train_method)
-    CV_IMPL_PROPERTY(int, MiniBatchSize, params.mini_batch_size)
-    CV_IMPL_PROPERTY(TermCriteria, TermCriteria, params.term_crit)
+    inline double getLearningRate() const CV_OVERRIDE { return params.alpha; }
+    inline void setLearningRate(double val) CV_OVERRIDE { params.alpha = val; }
+    inline int getIterations() const CV_OVERRIDE { return params.num_iters; }
+    inline void setIterations(int val) CV_OVERRIDE { params.num_iters = val; }
+    inline int getRegularization() const CV_OVERRIDE { return params.norm; }
+    inline void setRegularization(int val) CV_OVERRIDE { params.norm = val; }
+    inline int getTrainMethod() const CV_OVERRIDE { return params.train_method; }
+    inline void setTrainMethod(int val) CV_OVERRIDE { params.train_method = val; }
+    inline int getMiniBatchSize() const CV_OVERRIDE { return params.mini_batch_size; }
+    inline void setMiniBatchSize(int val) CV_OVERRIDE { params.mini_batch_size = val; }
+    inline TermCriteria getTermCriteria() const CV_OVERRIDE { return params.term_crit; }
+    inline void setTermCriteria(TermCriteria val) CV_OVERRIDE { params.term_crit = val; }

-    virtual bool train( const Ptr<TrainData>& trainData, int=0 );
-    virtual float predict(InputArray samples, OutputArray results, int flags=0) const;
-    virtual void clear();
-    virtual void write(FileStorage& fs) const;
-    virtual void read(const FileNode& fn);
-    virtual Mat get_learnt_thetas() const { return learnt_thetas; }
-    virtual int getVarCount() const { return learnt_thetas.cols; }
-    virtual bool isTrained() const { return !learnt_thetas.empty(); }
-    virtual bool isClassifier() const { return true; }
-    virtual String getDefaultName() const { return "opencv_ml_lr"; }
+    virtual bool train( const Ptr<TrainData>& trainData, int=0 ) CV_OVERRIDE;
+    virtual float predict(InputArray samples, OutputArray results, int flags=0) const CV_OVERRIDE;
+    virtual void clear() CV_OVERRIDE;
+    virtual void write(FileStorage& fs) const CV_OVERRIDE;
+    virtual void read(const FileNode& fn) CV_OVERRIDE;
+    virtual Mat get_learnt_thetas() const CV_OVERRIDE { return learnt_thetas; }
+    virtual int getVarCount() const CV_OVERRIDE { return learnt_thetas.cols; }
+    virtual bool isTrained() const CV_OVERRIDE { return !learnt_thetas.empty(); }
+    virtual bool isClassifier() const CV_OVERRIDE { return true; }
+    virtual String getDefaultName() const CV_OVERRIDE { return "opencv_ml_lr"; }
 protected:
     Mat calc_sigmoid(const Mat& data) const;
     double compute_cost(const Mat& _data, const Mat& _labels, const Mat& _init_theta);
@@ -392,7 +398,7 @@ struct LogisticRegressionImpl_ComputeDradient_Impl : ParallelLoopBody

     }

-    void operator()(const cv::Range& r) const
+    void operator()(const cv::Range& r) const CV_OVERRIDE
     {
         const Mat& _data = *data;
         const Mat &_theta = *theta;
@@ -52,7 +52,7 @@ public:
         nallvars = 0;
     }

-    bool train( const Ptr<TrainData>& trainData, int flags )
+    bool train( const Ptr<TrainData>& trainData, int flags ) CV_OVERRIDE
     {
         const float min_variation = FLT_EPSILON;
         Mat responses = trainData->getNormCatResponses();
@@ -219,7 +219,7 @@ public:
     float* value;
     bool rawOutput;

-    void operator()( const Range& range ) const
+    void operator()(const Range& range) const CV_OVERRIDE
     {
         int cls = -1;
         int rtype = 0, rptype = 0;
@@ -298,12 +298,12 @@ public:
         }
     };

-    float predict( InputArray _samples, OutputArray _results, int flags ) const
+    float predict( InputArray _samples, OutputArray _results, int flags ) const CV_OVERRIDE
     {
         return predictProb(_samples, _results, noArray(), flags);
     }

-    float predictProb( InputArray _samples, OutputArray _results, OutputArray _resultsProb, int flags ) const
+    float predictProb( InputArray _samples, OutputArray _results, OutputArray _resultsProb, int flags ) const CV_OVERRIDE
     {
         int value=0;
         Mat samples = _samples.getMat(), results, resultsProb;
@@ -339,7 +339,7 @@ public:
         return (float)value;
     }

-    void write( FileStorage& fs ) const
+    void write( FileStorage& fs ) const CV_OVERRIDE
     {
         int nclasses = (int)cls_labels.total(), i;

@@ -380,7 +380,7 @@ public:
         fs << "c" << c;
     }

-    void read( const FileNode& fn )
+    void read( const FileNode& fn ) CV_OVERRIDE
     {
         clear();

@@ -427,7 +427,7 @@ public:
         fn["c"] >> c;
     }

-    void clear()
+    void clear() CV_OVERRIDE
     {
         count.clear();
         sum.clear();
@@ -442,10 +442,10 @@ public:
         nallvars = 0;
     }

-    bool isTrained() const { return !avg.empty(); }
-    bool isClassifier() const { return true; }
-    int getVarCount() const { return nallvars; }
-    String getDefaultName() const { return "opencv_ml_nbayes"; }
+    bool isTrained() const CV_OVERRIDE { return !avg.empty(); }
+    bool isClassifier() const CV_OVERRIDE { return true; }
+    int getVarCount() const CV_OVERRIDE { return nallvars; }
+    String getDefaultName() const CV_OVERRIDE { return "opencv_ml_nbayes"; }

     int nallvars;
     Mat var_idx, cls_labels, c;
@@ -173,13 +173,17 @@ namespace ml
     inline int getCVFolds() const { return CVFolds; }
     inline float getRegressionAccuracy() const { return regressionAccuracy; }

-    CV_IMPL_PROPERTY(bool, UseSurrogates, useSurrogates)
-    CV_IMPL_PROPERTY(bool, Use1SERule, use1SERule)
-    CV_IMPL_PROPERTY(bool, TruncatePrunedTree, truncatePrunedTree)
-    CV_IMPL_PROPERTY_S(cv::Mat, Priors, priors)
+    inline bool getUseSurrogates() const { return useSurrogates; }
+    inline void setUseSurrogates(bool val) { useSurrogates = val; }
+    inline bool getUse1SERule() const { return use1SERule; }
+    inline void setUse1SERule(bool val) { use1SERule = val; }
+    inline bool getTruncatePrunedTree() const { return truncatePrunedTree; }
+    inline void setTruncatePrunedTree(bool val) { truncatePrunedTree = val; }
+    inline cv::Mat getPriors() const { return priors; }
+    inline void setPriors(const cv::Mat& val) { priors = val; }

 public:
     bool useSurrogates;
     bool use1SERule;
     bool truncatePrunedTree;
     Mat priors;
@@ -281,24 +285,33 @@ namespace ml
         int maxSubsetSize;
     };

-    CV_WRAP_SAME_PROPERTY(int, MaxCategories, params)
-    CV_WRAP_SAME_PROPERTY(int, MaxDepth, params)
-    CV_WRAP_SAME_PROPERTY(int, MinSampleCount, params)
-    CV_WRAP_SAME_PROPERTY(int, CVFolds, params)
-    CV_WRAP_SAME_PROPERTY(bool, UseSurrogates, params)
-    CV_WRAP_SAME_PROPERTY(bool, Use1SERule, params)
-    CV_WRAP_SAME_PROPERTY(bool, TruncatePrunedTree, params)
-    CV_WRAP_SAME_PROPERTY(float, RegressionAccuracy, params)
-    CV_WRAP_SAME_PROPERTY_S(cv::Mat, Priors, params)
+    inline int getMaxCategories() const CV_OVERRIDE { return params.getMaxCategories(); }
+    inline void setMaxCategories(int val) CV_OVERRIDE { params.setMaxCategories(val); }
+    inline int getMaxDepth() const CV_OVERRIDE { return params.getMaxDepth(); }
+    inline void setMaxDepth(int val) CV_OVERRIDE { params.setMaxDepth(val); }
+    inline int getMinSampleCount() const CV_OVERRIDE { return params.getMinSampleCount(); }
+    inline void setMinSampleCount(int val) CV_OVERRIDE { params.setMinSampleCount(val); }
+    inline int getCVFolds() const CV_OVERRIDE { return params.getCVFolds(); }
+    inline void setCVFolds(int val) CV_OVERRIDE { params.setCVFolds(val); }
+    inline bool getUseSurrogates() const CV_OVERRIDE { return params.getUseSurrogates(); }
+    inline void setUseSurrogates(bool val) CV_OVERRIDE { params.setUseSurrogates(val); }
+    inline bool getUse1SERule() const CV_OVERRIDE { return params.getUse1SERule(); }
+    inline void setUse1SERule(bool val) CV_OVERRIDE { params.setUse1SERule(val); }
+    inline bool getTruncatePrunedTree() const CV_OVERRIDE { return params.getTruncatePrunedTree(); }
+    inline void setTruncatePrunedTree(bool val) CV_OVERRIDE { params.setTruncatePrunedTree(val); }
+    inline float getRegressionAccuracy() const CV_OVERRIDE { return params.getRegressionAccuracy(); }
+    inline void setRegressionAccuracy(float val) CV_OVERRIDE { params.setRegressionAccuracy(val); }
+    inline cv::Mat getPriors() const CV_OVERRIDE { return params.getPriors(); }
+    inline void setPriors(const cv::Mat& val) CV_OVERRIDE { params.setPriors(val); }

     DTreesImpl();
-    virtual ~DTreesImpl();
-    virtual void clear();
+    virtual ~DTreesImpl() CV_OVERRIDE;
+    virtual void clear() CV_OVERRIDE;

-    String getDefaultName() const { return "opencv_ml_dtree"; }
-    bool isTrained() const { return !roots.empty(); }
-    bool isClassifier() const { return _isClassifier; }
-    int getVarCount() const { return varType.empty() ? 0 : (int)(varType.size() - 1); }
+    String getDefaultName() const CV_OVERRIDE { return "opencv_ml_dtree"; }
+    bool isTrained() const CV_OVERRIDE { return !roots.empty(); }
+    bool isClassifier() const CV_OVERRIDE { return _isClassifier; }
+    int getVarCount() const CV_OVERRIDE { return varType.empty() ? 0 : (int)(varType.size() - 1); }
     int getCatCount(int vi) const { return catOfs[vi][1] - catOfs[vi][0]; }
     int getSubsetSize(int vi) const { return (getCatCount(vi) + 31)/32; }

@@ -306,7 +319,7 @@ namespace ml
     virtual void startTraining( const Ptr<TrainData>& trainData, int flags );
     virtual void endTraining();
     virtual void initCompVarIdx();
-    virtual bool train( const Ptr<TrainData>& trainData, int flags );
+    virtual bool train( const Ptr<TrainData>& trainData, int flags ) CV_OVERRIDE;

     virtual int addTree( const vector<int>& sidx );
     virtual int addNodeAndTrySplit( int parent, const vector<int>& sidx );
@@ -329,25 +342,25 @@ namespace ml
     virtual double updateTreeRNC( int root, double T, int fold );
     virtual bool cutTree( int root, double T, int fold, double min_alpha );
     virtual float predictTrees( const Range& range, const Mat& sample, int flags ) const;
-    virtual float predict( InputArray inputs, OutputArray outputs, int flags ) const;
+    virtual float predict( InputArray inputs, OutputArray outputs, int flags ) const CV_OVERRIDE;

     virtual void writeTrainingParams( FileStorage& fs ) const;
     virtual void writeParams( FileStorage& fs ) const;
     virtual void writeSplit( FileStorage& fs, int splitidx ) const;
     virtual void writeNode( FileStorage& fs, int nidx, int depth ) const;
     virtual void writeTree( FileStorage& fs, int root ) const;
-    virtual void write( FileStorage& fs ) const;
+    virtual void write( FileStorage& fs ) const CV_OVERRIDE;

     virtual void readParams( const FileNode& fn );
     virtual int readSplit( const FileNode& fn );
     virtual int readNode( const FileNode& fn );
     virtual int readTree( const FileNode& fn );
-    virtual void read( const FileNode& fn );
+    virtual void read( const FileNode& fn ) CV_OVERRIDE;

-    virtual const std::vector<int>& getRoots() const { return roots; }
-    virtual const std::vector<Node>& getNodes() const { return nodes; }
-    virtual const std::vector<Split>& getSplits() const { return splits; }
-    virtual const std::vector<int>& getSubsets() const { return subsets; }
+    virtual const std::vector<int>& getRoots() const CV_OVERRIDE { return roots; }
+    virtual const std::vector<Node>& getNodes() const CV_OVERRIDE { return nodes; }
+    virtual const std::vector<Split>& getSplits() const CV_OVERRIDE { return splits; }
+    virtual const std::vector<int>& getSubsets() const CV_OVERRIDE { return subsets; }

     TreeParams params;

@@ -66,7 +66,7 @@ RTreeParams::RTreeParams(bool _calcVarImportance,
 }


-class DTreesImplForRTrees : public DTreesImpl
+class DTreesImplForRTrees CV_FINAL : public DTreesImpl
 {
 public:
     DTreesImplForRTrees()
@@ -85,7 +85,7 @@ public:
     }
     virtual ~DTreesImplForRTrees() {}

-    void clear()
+    void clear() CV_OVERRIDE
     {
         CV_TRACE_FUNCTION();
         DTreesImpl::clear();
@@ -93,7 +93,7 @@ public:
         rng = RNG((uint64)-1);
     }

-    const vector<int>& getActiveVars()
+    const vector<int>& getActiveVars() CV_OVERRIDE
     {
         CV_TRACE_FUNCTION();
         int i, nvars = (int)allVars.size(), m = (int)activeVars.size();
@@ -108,7 +108,7 @@ public:
         return activeVars;
     }

-    void startTraining( const Ptr<TrainData>& trainData, int flags )
+    void startTraining( const Ptr<TrainData>& trainData, int flags ) CV_OVERRIDE
     {
         CV_TRACE_FUNCTION();
         DTreesImpl::startTraining(trainData, flags);
@@ -121,7 +121,7 @@ public:
             allVars[i] = varIdx[i];
     }

-    void endTraining()
+    void endTraining() CV_OVERRIDE
     {
         CV_TRACE_FUNCTION();
         DTreesImpl::endTraining();
@@ -130,7 +130,7 @@ public:
         std::swap(activeVars, b);
     }

-    bool train( const Ptr<TrainData>& trainData, int flags )
+    bool train( const Ptr<TrainData>& trainData, int flags ) CV_OVERRIDE
     {
         CV_TRACE_FUNCTION();
         startTraining(trainData, flags);
@@ -293,14 +293,14 @@ public:
         return true;
     }

-    void writeTrainingParams( FileStorage& fs ) const
+    void writeTrainingParams( FileStorage& fs ) const CV_OVERRIDE
     {
         CV_TRACE_FUNCTION();
         DTreesImpl::writeTrainingParams(fs);
         fs << "nactive_vars" << rparams.nactiveVars;
     }

-    void write( FileStorage& fs ) const
+    void write( FileStorage& fs ) const CV_OVERRIDE
     {
         CV_TRACE_FUNCTION();
         if( roots.empty() )
@@ -328,7 +328,7 @@ public:
         fs << "]";
     }

-    void readParams( const FileNode& fn )
+    void readParams( const FileNode& fn ) CV_OVERRIDE
     {
         CV_TRACE_FUNCTION();
         DTreesImpl::readParams(fn);
@@ -337,7 +337,7 @@ public:
         rparams.nactiveVars = (int)tparams_node["nactive_vars"];
     }

-    void read( const FileNode& fn )
+    void read( const FileNode& fn ) CV_OVERRIDE
     {
         CV_TRACE_FUNCTION();
         clear();
@@ -425,29 +425,41 @@ public:
 };


-class RTreesImpl : public RTrees
+class RTreesImpl CV_FINAL : public RTrees
 {
 public:
-    CV_IMPL_PROPERTY(bool, CalculateVarImportance, impl.rparams.calcVarImportance)
-    CV_IMPL_PROPERTY(int, ActiveVarCount, impl.rparams.nactiveVars)
-    CV_IMPL_PROPERTY_S(TermCriteria, TermCriteria, impl.rparams.termCrit)
+    inline bool getCalculateVarImportance() const CV_OVERRIDE { return impl.rparams.calcVarImportance; }
+    inline void setCalculateVarImportance(bool val) CV_OVERRIDE { impl.rparams.calcVarImportance = val; }
+    inline int getActiveVarCount() const CV_OVERRIDE { return impl.rparams.nactiveVars; }
+    inline void setActiveVarCount(int val) CV_OVERRIDE { impl.rparams.nactiveVars = val; }
+    inline TermCriteria getTermCriteria() const CV_OVERRIDE { return impl.rparams.termCrit; }
+    inline void setTermCriteria(const TermCriteria& val) CV_OVERRIDE { impl.rparams.termCrit = val; }

-    CV_WRAP_SAME_PROPERTY(int, MaxCategories, impl.params)
-    CV_WRAP_SAME_PROPERTY(int, MaxDepth, impl.params)
-    CV_WRAP_SAME_PROPERTY(int, MinSampleCount, impl.params)
-    CV_WRAP_SAME_PROPERTY(int, CVFolds, impl.params)
-    CV_WRAP_SAME_PROPERTY(bool, UseSurrogates, impl.params)
-    CV_WRAP_SAME_PROPERTY(bool, Use1SERule, impl.params)
-    CV_WRAP_SAME_PROPERTY(bool, TruncatePrunedTree, impl.params)
-    CV_WRAP_SAME_PROPERTY(float, RegressionAccuracy, impl.params)
-    CV_WRAP_SAME_PROPERTY_S(cv::Mat, Priors, impl.params)
+    inline int getMaxCategories() const CV_OVERRIDE { return impl.params.getMaxCategories(); }
+    inline void setMaxCategories(int val) CV_OVERRIDE { impl.params.setMaxCategories(val); }
+    inline int getMaxDepth() const CV_OVERRIDE { return impl.params.getMaxDepth(); }
+    inline void setMaxDepth(int val) CV_OVERRIDE { impl.params.setMaxDepth(val); }
+    inline int getMinSampleCount() const CV_OVERRIDE { return impl.params.getMinSampleCount(); }
+    inline void setMinSampleCount(int val) CV_OVERRIDE { impl.params.setMinSampleCount(val); }
+    inline int getCVFolds() const CV_OVERRIDE { return impl.params.getCVFolds(); }
+    inline void setCVFolds(int val) CV_OVERRIDE { impl.params.setCVFolds(val); }
+    inline bool getUseSurrogates() const CV_OVERRIDE { return impl.params.getUseSurrogates(); }
+    inline void setUseSurrogates(bool val) CV_OVERRIDE { impl.params.setUseSurrogates(val); }
+    inline bool getUse1SERule() const CV_OVERRIDE { return impl.params.getUse1SERule(); }
+    inline void setUse1SERule(bool val) CV_OVERRIDE { impl.params.setUse1SERule(val); }
+    inline bool getTruncatePrunedTree() const CV_OVERRIDE { return impl.params.getTruncatePrunedTree(); }
+    inline void setTruncatePrunedTree(bool val) CV_OVERRIDE { impl.params.setTruncatePrunedTree(val); }
+    inline float getRegressionAccuracy() const CV_OVERRIDE { return impl.params.getRegressionAccuracy(); }
+    inline void setRegressionAccuracy(float val) CV_OVERRIDE { impl.params.setRegressionAccuracy(val); }
+    inline cv::Mat getPriors() const CV_OVERRIDE { return impl.params.getPriors(); }
+    inline void setPriors(const cv::Mat& val) CV_OVERRIDE { impl.params.setPriors(val); }

     RTreesImpl() {}
-    virtual ~RTreesImpl() {}
+    virtual ~RTreesImpl() CV_OVERRIDE {}

-    String getDefaultName() const { return "opencv_ml_rtrees"; }
+    String getDefaultName() const CV_OVERRIDE { return "opencv_ml_rtrees"; }

-    bool train( const Ptr<TrainData>& trainData, int flags )
+    bool train( const Ptr<TrainData>& trainData, int flags ) CV_OVERRIDE
     {
         CV_TRACE_FUNCTION();
         if (impl.getCVFolds() != 0)
@@ -455,19 +467,19 @@ public:
         return impl.train(trainData, flags);
     }

-    float predict( InputArray samples, OutputArray results, int flags ) const
+    float predict( InputArray samples, OutputArray results, int flags ) const CV_OVERRIDE
     {
         CV_TRACE_FUNCTION();
         return impl.predict(samples, results, flags);
     }

-    void write( FileStorage& fs ) const
+    void write( FileStorage& fs ) const CV_OVERRIDE
     {
         CV_TRACE_FUNCTION();
         impl.write(fs);
     }

-    void read( const FileNode& fn )
+    void read( const FileNode& fn ) CV_OVERRIDE
     {
         CV_TRACE_FUNCTION();
         impl.read(fn);
@@ -479,16 +491,16 @@ public:
         impl.getVotes(samples, results, flags);
     }

-    Mat getVarImportance() const { return Mat_<float>(impl.varImportance, true); }
-    int getVarCount() const { return impl.getVarCount(); }
+    Mat getVarImportance() const CV_OVERRIDE { return Mat_<float>(impl.varImportance, true); }
+    int getVarCount() const CV_OVERRIDE { return impl.getVarCount(); }

-    bool isTrained() const { return impl.isTrained(); }
-    bool isClassifier() const { return impl.isClassifier(); }
+    bool isTrained() const CV_OVERRIDE { return impl.isTrained(); }
+    bool isClassifier() const CV_OVERRIDE { return impl.isClassifier(); }

-    const vector<int>& getRoots() const { return impl.getRoots(); }
-    const vector<Node>& getNodes() const { return impl.getNodes(); }
-    const vector<Split>& getSplits() const { return impl.getSplits(); }
-    const vector<int>& getSubsets() const { return impl.getSubsets(); }
+    const vector<int>& getRoots() const CV_OVERRIDE { return impl.getRoots(); }
+    const vector<Node>& getNodes() const CV_OVERRIDE { return impl.getNodes(); }
+    const vector<Split>& getSplits() const CV_OVERRIDE { return impl.getSplits(); }
+    const vector<int>& getSubsets() const CV_OVERRIDE { return impl.getSubsets(); }

     DTreesImplForRTrees impl;
 };
@@ -149,7 +149,7 @@ struct SvmParams
};

/////////////////////////////////////// SVM kernel ///////////////////////////////////////
-class SVMKernelImpl : public SVM::Kernel
+class SVMKernelImpl CV_FINAL : public SVM::Kernel
{
public:
    SVMKernelImpl( const SvmParams& _params = SvmParams() )
@@ -157,7 +157,7 @@ public:
        params = _params;
    }

-    int getType() const
+    int getType() const CV_OVERRIDE
    {
        return params.kernelType;
    }
@@ -300,7 +300,7 @@ public:
    }

    void calc( int vcount, int var_count, const float* vecs,
-              const float* another, Qfloat* results )
+              const float* another, Qfloat* results ) CV_OVERRIDE
    {
        switch( params.kernelType )
        {
@@ -414,7 +414,7 @@ ParamGrid SVM::getDefaultGrid( int param_id )
}


-class SVMImpl : public SVM
+class SVMImpl CV_FINAL : public SVM
{
public:
    struct DecisionFunc
@@ -1241,7 +1241,7 @@ public:
        clear();
    }

-    void clear()
+    void clear() CV_OVERRIDE
    {
        decision_func.clear();
        df_alpha.clear();
@@ -1255,34 +1255,39 @@ public:
        return uncompressed_sv;
    }

-    Mat getSupportVectors() const
+    Mat getSupportVectors() const CV_OVERRIDE
    {
        return sv;
    }

-    CV_IMPL_PROPERTY(int, Type, params.svmType)
-    CV_IMPL_PROPERTY(double, Gamma, params.gamma)
-    CV_IMPL_PROPERTY(double, Coef0, params.coef0)
-    CV_IMPL_PROPERTY(double, Degree, params.degree)
-    CV_IMPL_PROPERTY(double, C, params.C)
-    CV_IMPL_PROPERTY(double, Nu, params.nu)
-    CV_IMPL_PROPERTY(double, P, params.p)
-    CV_IMPL_PROPERTY_S(cv::Mat, ClassWeights, params.classWeights)
-    CV_IMPL_PROPERTY_S(cv::TermCriteria, TermCriteria, params.termCrit)
+    inline int getType() const CV_OVERRIDE { return params.svmType; }
+    inline void setType(int val) CV_OVERRIDE { params.svmType = val; }
+    inline double getGamma() const CV_OVERRIDE { return params.gamma; }
+    inline void setGamma(double val) CV_OVERRIDE { params.gamma = val; }
+    inline double getCoef0() const CV_OVERRIDE { return params.coef0; }
+    inline void setCoef0(double val) CV_OVERRIDE { params.coef0 = val; }
+    inline double getDegree() const CV_OVERRIDE { return params.degree; }
+    inline void setDegree(double val) CV_OVERRIDE { params.degree = val; }
+    inline double getC() const CV_OVERRIDE { return params.C; }
+    inline void setC(double val) CV_OVERRIDE { params.C = val; }
+    inline double getNu() const CV_OVERRIDE { return params.nu; }
+    inline void setNu(double val) CV_OVERRIDE { params.nu = val; }
+    inline double getP() const CV_OVERRIDE { return params.p; }
+    inline void setP(double val) CV_OVERRIDE { params.p = val; }
+    inline cv::Mat getClassWeights() const CV_OVERRIDE { return params.classWeights; }
+    inline void setClassWeights(const cv::Mat& val) CV_OVERRIDE { params.classWeights = val; }
+    inline cv::TermCriteria getTermCriteria() const CV_OVERRIDE { return params.termCrit; }
+    inline void setTermCriteria(const cv::TermCriteria& val) CV_OVERRIDE { params.termCrit = val; }

-    int getKernelType() const
-    {
-        return params.kernelType;
-    }
-
-    void setKernel(int kernelType)
+    int getKernelType() const CV_OVERRIDE { return params.kernelType; }
+    void setKernel(int kernelType) CV_OVERRIDE
    {
        params.kernelType = kernelType;
        if (kernelType != CUSTOM)
            kernel = makePtr<SVMKernelImpl>(params);
    }

-    void setCustomKernel(const Ptr<Kernel> &_kernel)
+    void setCustomKernel(const Ptr<Kernel> &_kernel) CV_OVERRIDE
    {
        params.kernelType = CUSTOM;
        kernel = _kernel;
@@ -1606,7 +1611,7 @@ public:
        std::swap(decision_func, new_df);
    }

-    bool train( const Ptr<TrainData>& data, int )
+    bool train( const Ptr<TrainData>& data, int ) CV_OVERRIDE
    {
        clear();

@@ -1651,7 +1656,7 @@ public:
        sidx(_sidx), is_classification(_is_classification), k_fold(_k_fold), result(_result)
    {}

-    void operator()( const cv::Range& range ) const
+    void operator()( const cv::Range& range ) const CV_OVERRIDE
    {
        int sample_count = samples.rows;
        int var_count_ = samples.cols;
@@ -1732,7 +1737,7 @@ public:
    bool trainAuto( const Ptr<TrainData>& data, int k_fold,
                    ParamGrid C_grid, ParamGrid gamma_grid, ParamGrid p_grid,
                    ParamGrid nu_grid, ParamGrid coef_grid, ParamGrid degree_grid,
-                   bool balanced )
+                   bool balanced ) CV_OVERRIDE
    {
        checkParams();

@@ -1902,7 +1907,7 @@ public:
        returnDFVal = _returnDFVal;
    }

-    void operator()( const Range& range ) const
+    void operator()(const Range& range) const CV_OVERRIDE
    {
        int svmType = svm->params.svmType;
        int sv_total = svm->sv.rows;
@@ -1995,7 +2000,7 @@ public:
    }


-    float predict( InputArray _samples, OutputArray _results, int flags ) const
+    float predict( InputArray _samples, OutputArray _results, int flags ) const CV_OVERRIDE
    {
        float result = 0;
        Mat samples = _samples.getMat(), results;
@@ -2023,7 +2028,7 @@ public:
        return result;
    }

-    double getDecisionFunction(int i, OutputArray _alpha, OutputArray _svidx ) const
+    double getDecisionFunction(int i, OutputArray _alpha, OutputArray _svidx ) const CV_OVERRIDE
    {
        CV_Assert( 0 <= i && i < (int)decision_func.size());
        const DecisionFunc& df = decision_func[i];
@@ -2085,27 +2090,27 @@ public:
        fs << "}";
    }

-    bool isTrained() const
+    bool isTrained() const CV_OVERRIDE
    {
        return !sv.empty();
    }

-    bool isClassifier() const
+    bool isClassifier() const CV_OVERRIDE
    {
        return params.svmType == C_SVC || params.svmType == NU_SVC || params.svmType == ONE_CLASS;
    }

-    int getVarCount() const
+    int getVarCount() const CV_OVERRIDE
    {
        return var_count;
    }

-    String getDefaultName() const
+    String getDefaultName() const CV_OVERRIDE
    {
        return "opencv_ml_svm";
    }

-    void write( FileStorage& fs ) const
+    void write( FileStorage& fs ) const CV_OVERRIDE
    {
        int class_count = !class_labels.empty() ? (int)class_labels.total() :
                          params.svmType == ONE_CLASS ? 1 : 0;
@@ -2238,7 +2243,7 @@ public:
        setParams( _params );
    }

-    void read( const FileNode& fn )
+    void read( const FileNode& fn ) CV_OVERRIDE
    {
        clear();
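
Aside, for context (not part of the commit): the CV_IMPL_PROPERTY blocks above are unrolled into explicit inline accessors, presumably because a generic property macro has no way to attach CV_OVERRIDE to the functions it generates. A rough sketch of the two styles under that assumption; IMPL_PROPERTY below is a simplified stand-in, not OpenCV's actual macro, and the *Like types are invented for illustration.

#ifndef CV_OVERRIDE
#define CV_OVERRIDE override   // assumption about what the OpenCV macro maps to
#endif

// Simplified stand-in for a property-implementation macro: it expands to a
// getter/setter pair, but there is no hook for adding a per-call specifier.
#define IMPL_PROPERTY(type, name, member) \
    type get##name() const { return member; } \
    void set##name(type val) { member = val; }

struct ParamsDemo { double gamma; ParamsDemo() : gamma(1.0) {} };

struct PlainParamsHolder
{
    ParamsDemo params;
    IMPL_PROPERTY(double, Gamma, params.gamma)   // concise, but cannot say 'override'
};

struct SvmLikeBase
{
    virtual ~SvmLikeBase() {}
    virtual double getGamma() const = 0;
    virtual void setGamma(double val) = 0;
};

struct SvmLikeImpl : SvmLikeBase
{
    ParamsDemo params;
    // Hand-written accessors can carry the specifier, so any mismatch with
    // the abstract interface is caught at compile time.
    inline double getGamma() const CV_OVERRIDE { return params.gamma; }
    inline void setGamma(double val) CV_OVERRIDE { params.gamma = val; }
};

int main()
{
    PlainParamsHolder plain;
    plain.setGamma(0.25);

    SvmLikeImpl svm;
    svm.setGamma(0.5);
    return (plain.getGamma() == 0.25 && svm.getGamma() == 0.5) ? 0 : 1;
}
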
@@ -57,7 +57,7 @@ namespace cv
namespace ml
{

-class SVMSGDImpl : public SVMSGD
+class SVMSGDImpl CV_FINAL : public SVMSGD
{

public:
@@ -65,36 +65,42 @@ public:

    virtual ~SVMSGDImpl() {}

-    virtual bool train(const Ptr<TrainData>& data, int);
+    virtual bool train(const Ptr<TrainData>& data, int) CV_OVERRIDE;

-    virtual float predict( InputArray samples, OutputArray results=noArray(), int flags = 0 ) const;
+    virtual float predict( InputArray samples, OutputArray results=noArray(), int flags = 0 ) const CV_OVERRIDE;

-    virtual bool isClassifier() const;
+    virtual bool isClassifier() const CV_OVERRIDE;

-    virtual bool isTrained() const;
+    virtual bool isTrained() const CV_OVERRIDE;

-    virtual void clear();
+    virtual void clear() CV_OVERRIDE;

-    virtual void write(FileStorage &fs) const;
+    virtual void write(FileStorage &fs) const CV_OVERRIDE;

-    virtual void read(const FileNode &fn);
+    virtual void read(const FileNode &fn) CV_OVERRIDE;

-    virtual Mat getWeights(){ return weights_; }
+    virtual Mat getWeights() CV_OVERRIDE { return weights_; }

-    virtual float getShift(){ return shift_; }
+    virtual float getShift() CV_OVERRIDE { return shift_; }

-    virtual int getVarCount() const { return weights_.cols; }
+    virtual int getVarCount() const CV_OVERRIDE { return weights_.cols; }

-    virtual String getDefaultName() const {return "opencv_ml_svmsgd";}
+    virtual String getDefaultName() const CV_OVERRIDE {return "opencv_ml_svmsgd";}

-    virtual void setOptimalParameters(int svmsgdType = ASGD, int marginType = SOFT_MARGIN);
+    virtual void setOptimalParameters(int svmsgdType = ASGD, int marginType = SOFT_MARGIN) CV_OVERRIDE;

-    CV_IMPL_PROPERTY(int, SvmsgdType, params.svmsgdType)
-    CV_IMPL_PROPERTY(int, MarginType, params.marginType)
-    CV_IMPL_PROPERTY(float, MarginRegularization, params.marginRegularization)
-    CV_IMPL_PROPERTY(float, InitialStepSize, params.initialStepSize)
-    CV_IMPL_PROPERTY(float, StepDecreasingPower, params.stepDecreasingPower)
-    CV_IMPL_PROPERTY_S(cv::TermCriteria, TermCriteria, params.termCrit)
+    inline int getSvmsgdType() const CV_OVERRIDE { return params.svmsgdType; }
+    inline void setSvmsgdType(int val) CV_OVERRIDE { params.svmsgdType = val; }
+    inline int getMarginType() const CV_OVERRIDE { return params.marginType; }
+    inline void setMarginType(int val) CV_OVERRIDE { params.marginType = val; }
+    inline float getMarginRegularization() const CV_OVERRIDE { return params.marginRegularization; }
+    inline void setMarginRegularization(float val) CV_OVERRIDE { params.marginRegularization = val; }
+    inline float getInitialStepSize() const CV_OVERRIDE { return params.initialStepSize; }
+    inline void setInitialStepSize(float val) CV_OVERRIDE { params.initialStepSize = val; }
+    inline float getStepDecreasingPower() const CV_OVERRIDE { return params.stepDecreasingPower; }
+    inline void setStepDecreasingPower(float val) CV_OVERRIDE { params.stepDecreasingPower = val; }
+    inline cv::TermCriteria getTermCriteria() const CV_OVERRIDE { return params.termCrit; }
+    inline void setTermCriteria(const cv::TermCriteria& val) CV_OVERRIDE { params.termCrit = val; }

private:
    void updateWeights(InputArray sample, bool positive, float stepSize, Mat &weights);
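
Aside, for context (not part of the commit): CV_FINAL on SVMKernelImpl, SVMImpl and SVMSGDImpl marks the concrete implementations as leaf classes. A minimal sketch follows, assuming CV_FINAL expands to the C++11 'final' specifier on supporting compilers; the *Like names are invented for illustration.

// Assumption: CV_FINAL maps to 'final' where the compiler supports it.
#ifndef CV_FINAL
#define CV_FINAL final
#endif

// Hypothetical stand-ins loosely mirroring SVMSGD and its implementation.
struct SvmsgdLike
{
    virtual ~SvmsgdLike() {}
    virtual float getShift() const { return 0.f; }
};

// 'final' documents that nothing is meant to derive from the implementation
// and allows the compiler to devirtualize calls made through the exact type.
struct SvmsgdImplLike CV_FINAL : SvmsgdLike
{
    float getShift() const override { return 0.5f; }
};

// struct Extended : SvmsgdImplLike {};  // would not compile: the class is final

int main()
{
    SvmsgdImplLike impl;
    return impl.getShift() > 0.f ? 0 : 1;  // resolvable without virtual dispatch
}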