fixed compile warnings from MSVC; fixed warnings in Python bindings; added some debugging code

Vadim Pisarevsky 2014-08-03 12:46:28 +04:00
parent 9cb79b9abb
commit eeb786fcfd
10 changed files with 30 additions and 30 deletions
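Note: most of the casts in the hunks below exist to silence MSVC's implicit-conversion warnings (the C4244/C4245/C4267 family: double-to-float and size_t/int truncation, plus negative constants assigned to unsigned types). A minimal standalone sketch of the pattern being applied throughout, not code from this commit:

    #include <cstdint>

    int main()
    {
        // Negative constant into an unsigned type: MSVC warns unless the
        // sign conversion is spelled out (the all-ones bit pattern is intended).
        uint64_t seed = (uint64_t)-1;

        // double expression stored in a float: make the narrowing explicit.
        double err = 12.0; int n = 7;
        float rate = (float)(err / n * 100);

        // Keep the arithmetic in float when a float result is wanted.
        int k = 10;
        float inv_scale = 1.f/k;

        return (seed != 0 && rate > 0.f && inv_scale > 0.f) ? 0 : 1;
    }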

View File

@@ -205,7 +205,7 @@ public:
 class CV_EXPORTS_W NormalBayesClassifier : public StatModel
 {
 public:
-    class CV_EXPORTS_W_MAP Params
+    class CV_EXPORTS_W Params
     {
     public:
         Params();
@@ -231,8 +231,8 @@ public:
     public:
         Params(int defaultK=10, bool isclassifier=true);

-        int defaultK;
-        bool isclassifier;
+        CV_PROP_RW int defaultK;
+        CV_PROP_RW bool isclassifier;
     };
     virtual void setParams(const Params& p) = 0;
     virtual Params getParams() const = 0;
@@ -328,9 +328,9 @@ public:
         explicit Params(int nclusters=DEFAULT_NCLUSTERS, int covMatType=EM::COV_MAT_DIAGONAL,
                         const TermCriteria& termCrit=TermCriteria(TermCriteria::COUNT+TermCriteria::EPS,
                                                                   EM::DEFAULT_MAX_ITERS, 1e-6));
-        int nclusters;
-        int covMatType;
-        TermCriteria termCrit;
+        CV_PROP_RW int nclusters;
+        CV_PROP_RW int covMatType;
+        CV_PROP_RW TermCriteria termCrit;
     };

     virtual void setParams(const Params& p) = 0;
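For the Python bindings, CV_EXPORTS_W marks a class for wrapping and CV_PROP_RW exposes a field as a read/write attribute; CV_EXPORTS_W_MAP instead asks the generator to convert the whole struct to/from a Python dict, which requires every member to be a wrappable property. A sketch of a Params struct annotated under this scheme (illustrative, mirroring the hunks above; the macros come from OpenCV's core headers and are consumed by the binding generator, not the C++ compiler):

    #include <opencv2/core.hpp>

    class CV_EXPORTS_W Params
    {
    public:
        Params(int defaultK=10, bool isclassifier=true);

        CV_PROP_RW int defaultK;       // Python side: params.defaultK, get/set
        CV_PROP_RW bool isclassifier;  // Python side: params.isclassifier
    };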

View File

@@ -123,7 +123,7 @@ public:
     void clear()
     {
         min_val = max_val = min_val1 = max_val1 = 0.;
-        rng = RNG(-1);
+        rng = RNG((uint64)-1);
         weights.clear();
         trained = false;
     }
@@ -300,7 +300,7 @@ public:
         {
             int maxIdx[] = {0, 0};
             minMaxIdx(outputs, 0, 0, 0, maxIdx);
-            return maxIdx[0] + maxIdx[1];
+            return (float)(maxIdx[0] + maxIdx[1]);
         }

         return 0.f;
@@ -702,9 +702,8 @@ public:
             train_backprop( inputs, outputs, sw, termcrit ) :
             train_rprop( inputs, outputs, sw, termcrit );
-        trained = true;
-        return iter;
+        trained = iter > 0;
+        return trained;
     }

     int train_backprop( const Mat& inputs, const Mat& outputs, const Mat& _sw, TermCriteria termCrit )
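cv::RNG keeps its state as a uint64, so seeding with -1 relies on an implicit signed-to-unsigned conversion that MSVC flags; the cast makes the all-ones seed explicit without changing behavior. (The last hunk above also changes train()'s tail so success is reported as iter > 0 rather than the raw iteration count.) A standalone sketch of the seeding idea, using a hypothetical stand-in for the RNG state:

    #include <cstdint>
    #include <cstdio>

    // Hypothetical stand-in: cv::RNG likewise stores a uint64 state.
    struct TinyRng
    {
        uint64_t state;
        explicit TinyRng(uint64_t s) : state(s) {}
    };

    int main()
    {
        TinyRng rng((uint64_t)-1);   // explicit: seed = 0xffffffffffffffff
        // TinyRng warned(-1);       // same bits, but MSVC warns on the
                                     // implicit signed/unsigned conversion
        std::printf("%016llx\n", (unsigned long long)rng.state);
        return 0;
    }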

View File

@@ -220,12 +220,13 @@ public:
     void updateWeightsAndTrim( int treeidx, vector<int>& sidx )
     {
+        putchar('<');
         int i, n = (int)w->sidx.size();
         int nvars = (int)varIdx.size();
         double sumw = 0., C = 1.;
-        cv::AutoBuffer<double> buf(n*3 + nvars);
+        cv::AutoBuffer<double> buf(n + nvars);
         double* result = buf;
-        float* sbuf = (float*)(result + n*3);
+        float* sbuf = (float*)(result + n);
         Mat sample(1, nvars, CV_32F, sbuf);
         int predictFlags = bparams.boostType == Boost::DISCRETE ? (PREDICT_MAX_VOTE | RAW_OUTPUT) : PREDICT_SUM;
         predictFlags |= COMPRESSED_INPUT;
@@ -373,6 +374,7 @@ public:
             if( w->sample_weights[si] >= threshold )
                 sidx.push_back(si);
         }
+        putchar('>'); fflush(stdout);
     }

     float predictTrees( const Range& range, const Mat& sample, int flags0 ) const
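Two things happen here: putchar('<') / putchar('>') bracket the weight-update phase as a crude progress trace (the "debugging code" from the commit message; the mirror change in rtrees.cpp below drops its per-tree putchar('.')), and the scratch buffer shrinks from n*3 + nvars to n + nvars doubles, matching the single n-sized result region it is carved into. A sketch of that carving pattern, assuming the 3.x-era AutoBuffer pointer conversion used in the hunk:

    #include <opencv2/core.hpp>
    #include <opencv2/core/utility.hpp>   // cv::AutoBuffer

    void carveScratch(int n, int nvars)
    {
        // One allocation, two views: n doubles of per-sample results,
        // then a float row of nvars elements for a compressed sample.
        cv::AutoBuffer<double> buf(n + nvars);
        double* result = buf;                   // newer OpenCV: buf.data()
        float* sbuf = (float*)(result + n);     // nvars doubles reused as floats
        cv::Mat sample(1, nvars, CV_32F, sbuf); // Mat header over borrowed memory
        (void)result; (void)sample;
    }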

View File

@@ -310,7 +310,7 @@ public:
             varType.create(1, nvars, CV_8U);
             varType = Scalar::all(VAR_ORDERED);
             if( noutputvars == 1 )
-                varType.at<uchar>(ninputvars) = responses.type() < CV_32F ? VAR_CATEGORICAL : VAR_ORDERED;
+                varType.at<uchar>(ninputvars) = (uchar)(responses.type() < CV_32F ? VAR_CATEGORICAL : VAR_ORDERED);
         }

         if( noutputvars > 1 )
@@ -558,7 +558,7 @@ public:
             if( tp == VAR_MISSED )
                 haveMissed = true;
             rowvals.push_back(val);
-            rowtypes.push_back(tp);
+            rowtypes.push_back((uchar)tp);
             token = strtok(NULL, delimiters);
             if (!token)
                 break;
@@ -880,7 +880,7 @@ public:
             if( s )
             {
                 j = s[i];
-                CV_DbgAssert( 0 <= j && j < nsamples );
+                CV_Assert( 0 <= j && j < nsamples );
             }
             values[i] = src[j*sstep];
             if( values[i] == MISSED_VAL )
@@ -955,7 +955,7 @@ public:
             if( vptr )
             {
                 j = vptr[i];
-                CV_DbgAssert( 0 <= j && j < nvars );
+                CV_Assert( 0 <= j && j < nvars );
             }
             buf[i] = src[j*vstep];
         }
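The (uchar) casts acknowledge that varType and rowtypes store 8-bit codes, and the switch from CV_DbgAssert to CV_Assert keeps the bounds check alive in release builds (CV_DbgAssert compiles away outside debug builds), which matters when j comes from caller-supplied sample/variable index arrays. A compact sketch of the difference, with a hypothetical accessor in the spirit of the hunk:

    #include <opencv2/core.hpp>

    // Hypothetical helper: fetch value i, optionally remapped through a
    // caller-supplied index array s.
    float sampleAt(const float* src, const int* s, int i, int nsamples, int sstep)
    {
        int j = i;
        if( s )
        {
            j = s[i];
            CV_Assert( 0 <= j && j < nsamples );   // checked in release builds too
            // CV_DbgAssert(...) would vanish unless _DEBUG is defined
        }
        return src[j*sstep];
    }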

View File

@@ -108,7 +108,7 @@ float StatModel::calcError( const Ptr<TrainData>& data, bool testerr, OutputArra
     if( _resp.needed() )
         resp.copyTo(_resp);

-    return err / n * (isclassifier ? 100 : 1);
+    return (float)(err / n * (isclassifier ? 100 : 1));
 }

 void StatModel::save(const String& filename) const

View File

@@ -173,7 +173,7 @@ public:
         }

         float result = 0.f;
-        float inv_scale = 1./k;
+        float inv_scale = 1.f/k;

         for( testidx = 0; testidx < testcount; testidx++ )
         {

View File

@@ -111,7 +111,7 @@ namespace ml
         termCrit.type |= TermCriteria::EPS;
         termCrit.epsilon = epsilon;
     }
-    int iters = (double)fn["iterations"];
+    int iters = (int)fn["iterations"];
     if( iters > 0 )
     {
         termCrit.type |= TermCriteria::COUNT;
@@ -134,7 +134,7 @@ namespace ml
         }

         int class_idx;
-        int Tn;
+        double Tn;
         double value;
         int parent;
@@ -164,7 +164,7 @@ namespace ml
         }

         int varIdx;
-        int inversed;
+        bool inversed;
         float quality;
         int next;
         float c;
@@ -179,7 +179,7 @@ namespace ml
     vector<WNode> wnodes;
     vector<WSplit> wsplits;
     vector<int> wsubsets;
-    vector<int> cv_Tn;
+    vector<double> cv_Tn;
     vector<double> cv_node_risk;
     vector<double> cv_node_error;
     vector<int> cv_labels;
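cv::FileNode provides conversion operators to both int and double, so an integer field can be read with a direct int cast instead of the double detour MSVC flags when the result lands in an int; the field-type changes (Tn, inversed, cv_Tn) keep these declarations in sync with how tree.cpp now assigns them (see the last file below). A sketch of the read path; the "iterations" key comes from the hunk, while "epsilon" and the surrounding shape are assumptions:

    #include <opencv2/core.hpp>

    cv::TermCriteria readTermCrit(const cv::FileNode& fn)
    {
        cv::TermCriteria termCrit;
        double epsilon = (double)fn["epsilon"];   // FileNode -> double
        if( epsilon > 0 )
        {
            termCrit.type |= cv::TermCriteria::EPS;
            termCrit.epsilon = epsilon;
        }
        int iters = (int)fn["iterations"];        // FileNode -> int directly,
        if( iters > 0 )                           // no double->int truncation
        {
            termCrit.type |= cv::TermCriteria::COUNT;
            termCrit.maxCount = iters;
        }
        return termCrit;
    }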

View File

@@ -90,7 +90,7 @@ public:
     {
         DTreesImpl::clear();
         oobError = 0.;
-        rng = RNG(-1);
+        rng = RNG((uint64)-1);
     }

     const vector<int>& getActiveVars()
@@ -177,7 +177,6 @@ public:
         for( treeidx = 0; treeidx < ntrees; treeidx++ )
         {
-            putchar('.'); fflush(stdout);
             for( i = 0; i < n; i++ )
                 oobmask[i] = (uchar)1;

View File

@@ -1587,7 +1587,7 @@ public:
                                  bool balanced )
     {
         int svmType = params.svmType;
-        RNG rng(-1);
+        RNG rng((uint64)-1);

         if( svmType == ONE_CLASS )
             // current implementation of "auto" svm does not support the 1-class case.

View File

@@ -730,7 +730,7 @@ DTreesImpl::WSplit DTreesImpl::findSplitOrdClass( int vi, const vector<int>& _si
     {
         split.varIdx = vi;
         split.c = (values[sorted_idx[best_i]] + values[sorted_idx[best_i+1]])*0.5f;
-        split.inversed = 0;
+        split.inversed = false;
         split.quality = (float)best_val;
     }
     return split;
@@ -744,12 +744,12 @@ void DTreesImpl::clusterCategories( const double* vectors, int n, int m, double*
     cv::AutoBuffer<double> buf(n + k);
     double *v_weights = buf, *c_weights = buf + n;
     bool modified = true;
-    RNG r(-1);
+    RNG r((uint64)-1);

     // assign labels randomly
     for( i = 0; i < n; i++ )
     {
-        int sum = 0;
+        double sum = 0;
         const double* v = vectors + i*m;
         labels[i] = i < k ? i : r.uniform(0, k);
@@ -1063,7 +1063,7 @@ DTreesImpl::WSplit DTreesImpl::findSplitOrdReg( int vi, const vector<int>& _sidx
     {
         split.varIdx = vi;
         split.c = (values[sorted_idx[best_i]] + values[sorted_idx[best_i+1]])*0.5f;
-        split.inversed = 0;
+        split.inversed = false;
         split.quality = (float)best_val;
     }
     return split;
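The inversed assignments follow the field's new bool type, and in clusterCategories the accumulator sums double-valued vector entries, so int sum truncated on every addition; declaring it double is a correctness fix, not just a warning fix. A tiny standalone illustration with hypothetical values:

    #include <cstdio>

    int main()
    {
        const double v[] = { 0.4, 0.4, 0.4 };  // hypothetical fractional weights
        int bad = 0;
        double good = 0.;
        for( int i = 0; i < 3; i++ )
        {
            bad = (int)(bad + v[i]);  // what "int sum" effectively did: stays 0
            good += v[i];             // "double sum": 1.2 as expected
        }
        std::printf("int sum = %d, double sum = %g\n", bad, good);
        return 0;
    }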