Fix android build warnings
commit 07d92d9e5a
parent 8325a28d09
@@ -81,46 +81,46 @@ Mat BOWMSCTrainer::cluster() const {
     return cluster(mergedDescriptors);
 }
 
-Mat BOWMSCTrainer::cluster(const Mat& descriptors) const {
+Mat BOWMSCTrainer::cluster(const Mat& _descriptors) const {
 
-    CV_Assert(!descriptors.empty());
+    CV_Assert(!_descriptors.empty());
 
     // TODO: sort the descriptors before clustering.
 
 
-    Mat icovar = Mat::eye(descriptors.cols,descriptors.cols,descriptors.type());
+    Mat icovar = Mat::eye(_descriptors.cols,_descriptors.cols,_descriptors.type());
 
     vector<Mat> initialCentres;
-    initialCentres.push_back(descriptors.row(0));
-    for (int i = 1; i < descriptors.rows; i++) {
+    initialCentres.push_back(_descriptors.row(0));
+    for (int i = 1; i < _descriptors.rows; i++) {
         double minDist = DBL_MAX;
         for (size_t j = 0; j < initialCentres.size(); j++) {
             minDist = std::min(minDist,
-                cv::Mahalanobis(descriptors.row(i),initialCentres[j],
+                cv::Mahalanobis(_descriptors.row(i),initialCentres[j],
                 icovar));
         }
         if (minDist > clusterSize)
-            initialCentres.push_back(descriptors.row(i));
+            initialCentres.push_back(_descriptors.row(i));
     }
 
     std::vector<std::list<cv::Mat> > clusters;
     clusters.resize(initialCentres.size());
-    for (int i = 0; i < descriptors.rows; i++) {
+    for (int i = 0; i < _descriptors.rows; i++) {
         int index = 0; double dist = 0, minDist = DBL_MAX;
         for (size_t j = 0; j < initialCentres.size(); j++) {
-            dist = cv::Mahalanobis(descriptors.row(i),initialCentres[j],icovar);
+            dist = cv::Mahalanobis(_descriptors.row(i),initialCentres[j],icovar);
             if (dist < minDist) {
                 minDist = dist;
                 index = (int)j;
             }
         }
-        clusters[index].push_back(descriptors.row(i));
+        clusters[index].push_back(_descriptors.row(i));
     }
 
     // TODO: throw away small clusters.
 
     Mat vocabulary;
-    Mat centre = Mat::zeros(1,descriptors.cols,descriptors.type());
+    Mat centre = Mat::zeros(1,_descriptors.cols,_descriptors.type());
     for (size_t i = 0; i < clusters.size(); i++) {
         centre.setTo(0);
         for (std::list<cv::Mat>::iterator Ci = clusters[i].begin(); Ci != clusters[i].end(); Ci++) {
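The renames in this hunk look like fixes for -Wshadow-style warnings raised by the warning-heavy Android build: the parameter `descriptors` carries the same name as a member of the trainer class, so inside the function the parameter hides the member. A minimal sketch of the pattern and of the underscore-prefix fix, using made-up class and member names rather than the real OpenCV declarations:

// shadow_demo.cpp -- illustrative only; compile with e.g. g++ -Wshadow -c shadow_demo.cpp
#include <vector>

class Trainer {
protected:
    std::vector<int> descriptors;                        // member with a generic name
public:
    // warning with -Wshadow: the parameter shadows the member of the same name
    int cluster(const std::vector<int>& descriptors) const {
        return (int)descriptors.size();                  // refers to the parameter, not the member
    }
    // the underscore-prefixed name used in the patch avoids the clash
    int clusterFixed(const std::vector<int>& _descriptors) const {
        return (int)_descriptors.size();
    }
};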
@@ -445,16 +445,16 @@ FabMap1::~FabMap1() {
 }
 
 void FabMap1::getLikelihoods(const Mat& queryImgDescriptor,
-        const vector<Mat>& testImgDescriptors, vector<IMatch>& matches) {
+        const vector<Mat>& testImageDescriptors, vector<IMatch>& matches) {
 
-    for (size_t i = 0; i < testImgDescriptors.size(); i++) {
+    for (size_t i = 0; i < testImageDescriptors.size(); i++) {
         bool zq, zpq, Lzq;
         double logP = 0;
         for (int q = 0; q < clTree.cols; q++) {
 
             zq = queryImgDescriptor.at<float>(0,q) > 0;
             zpq = queryImgDescriptor.at<float>(0,pq(q)) > 0;
-            Lzq = testImgDescriptors[i].at<float>(0,q) > 0;
+            Lzq = testImageDescriptors[i].at<float>(0,q) > 0;
 
             logP += log((this->*PzGL)(q, zq, zpq, Lzq));
 
@@ -490,16 +490,16 @@ FabMapLUT::~FabMapLUT() {
 }
 
 void FabMapLUT::getLikelihoods(const Mat& queryImgDescriptor,
-        const vector<Mat>& testImgDescriptors, vector<IMatch>& matches) {
+        const vector<Mat>& testImageDescriptors, vector<IMatch>& matches) {
 
     double precFactor = (double)pow(10.0, -precision);
 
-    for (size_t i = 0; i < testImgDescriptors.size(); i++) {
+    for (size_t i = 0; i < testImageDescriptors.size(); i++) {
         unsigned long long int logP = 0;
         for (int q = 0; q < clTree.cols; q++) {
             logP += table[q][(queryImgDescriptor.at<float>(0,pq(q)) > 0) +
                 ((queryImgDescriptor.at<float>(0, q) > 0) << 1) +
-                ((testImgDescriptors[i].at<float>(0,q) > 0) << 2)];
+                ((testImageDescriptors[i].at<float>(0,q) > 0) << 2)];
         }
         matches.push_back(IMatch(0,(int)i,-precFactor*(double)logP,0));
     }
@@ -518,7 +518,7 @@ FabMapFBO::~FabMapFBO() {
 }
 
 void FabMapFBO::getLikelihoods(const Mat& queryImgDescriptor,
-        const vector<Mat>& testImgDescriptors, vector<IMatch>& matches) {
+        const vector<Mat>& testImageDescriptors, vector<IMatch>& matches) {
 
     std::multiset<WordStats> wordData;
     setWordStatistics(queryImgDescriptor, wordData);
@@ -526,7 +526,7 @@ void FabMapFBO::getLikelihoods(const Mat& queryImgDescriptor,
     vector<int> matchIndices;
     vector<IMatch> queryMatches;
 
-    for (size_t i = 0; i < testImgDescriptors.size(); i++) {
+    for (size_t i = 0; i < testImageDescriptors.size(); i++) {
         queryMatches.push_back(IMatch(0,(int)i,0,0));
         matchIndices.push_back((int)i);
     }
@@ -543,7 +543,7 @@ void FabMapFBO::getLikelihoods(const Mat& queryImgDescriptor,
 
         for (size_t i = 0; i < matchIndices.size(); i++) {
             bool Lzq =
-                testImgDescriptors[matchIndices[i]].at<float>(0,wordIter->q) > 0;
+                testImageDescriptors[matchIndices[i]].at<float>(0,wordIter->q) > 0;
             queryMatches[matchIndices[i]].likelihood +=
                 log((this->*PzGL)(wordIter->q,zq,zpq,Lzq));
             currBest =
@@ -689,17 +689,17 @@ void FabMap2::add(const vector<Mat>& queryImgDescriptors) {
 }
 
 void FabMap2::getLikelihoods(const Mat& queryImgDescriptor,
-        const vector<Mat>& testImgDescriptors, vector<IMatch>& matches) {
+        const vector<Mat>& testImageDescriptors, vector<IMatch>& matches) {
 
-    if (&testImgDescriptors== &(this->testImgDescriptors)) {
+    if (&testImageDescriptors == &testImgDescriptors) {
         getIndexLikelihoods(queryImgDescriptor, testDefaults, testInvertedMap,
             matches);
     } else {
         CV_Assert(!(flags & MOTION_MODEL));
         vector<double> defaults;
         std::map<int, vector<int> > invertedMap;
-        for (size_t i = 0; i < testImgDescriptors.size(); i++) {
-            addToIndex(testImgDescriptors[i],defaults,invertedMap);
+        for (size_t i = 0; i < testImageDescriptors.size(); i++) {
+            addToIndex(testImageDescriptors[i],defaults,invertedMap);
         }
         getIndexLikelihoods(queryImgDescriptor, defaults, invertedMap, matches);
     }
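The FabMap hunks apply the same kind of rename to the `testImgDescriptors` parameter, which hides the member of the same name. The FabMap2 hunk makes the effect visible: the old address comparison needed `this->` to reach the member, while after the rename both sides can be named without qualification. A simplified sketch of that address-identity check, with illustrative types and a made-up class name:

// identity_check_demo.cpp -- simplified sketch, not the actual FabMap2 code
#include <vector>

class Matcher {
    std::vector<int> testImgDescriptors;   // stored test data with a cached index
public:
    void getLikelihoods(const std::vector<int>& testImageDescriptors) {
        if (&testImageDescriptors == &testImgDescriptors) {
            // the caller passed our own stored data: reuse the cached index
        } else {
            // arbitrary data: build a temporary index for just this call
        }
    }
};

Comparing addresses here is a cheap way to detect "the caller passed the object's own stored descriptors", so the precomputed index can be reused instead of rebuilt.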
@@ -1020,7 +1020,7 @@ cvRunHaarClassifierCascadeSum( const CvHaarClassifierCascade* _cascade,
     }
     else
 #endif
-#ifdef CV_HAAR_USE_SSE && !CV_HAAR_USE_AVX //old SSE optimization
+#if defined CV_HAAR_USE_SSE && CV_HAAR_USE_SSE && !CV_HAAR_USE_AVX //old SSE optimization
     if(haveSSE2)
     {
         for( i = start_stage; i < cascade->count; i++ )
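This hunk fixes a malformed preprocessor conditional, which is presumably what the Android toolchain warned about: `#ifdef` accepts exactly one macro name, so the `&& !CV_HAAR_USE_AVX` part was never evaluated and compilers flag it as extra tokens (clang's -Wextra-tokens, GCC's default "extra tokens" warning). `#if defined ...` evaluates the whole expression. A small sketch with placeholder macros FOO and BAR rather than the OpenCV ones:

// directive_demo.cpp -- illustrative only
#define FOO 1

#ifdef FOO && !BAR              /* warning: extra tokens at end of #ifdef directive */
static const int by_ifdef = 1;  /* enabled whenever FOO is defined, regardless of BAR */
#endif

#if defined FOO && FOO && !BAR  /* whole condition actually checked; undefined BAR reads as 0 */
static const int by_if = 1;
#endif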
@@ -1111,23 +1111,23 @@ cvRunHaarClassifierCascadeSum( const CvHaarClassifierCascade* _cascade,
         for( i = start_stage; i < cascade->count; i++ )
         {
             stage_sum = 0.0;
-            int j = 0;
+            int k = 0;
 #ifdef CV_HAAR_USE_AVX
             if(haveAVX)
             {
-                for( ; j < cascade->stage_classifier[i].count-8; j+=8 )
+                for( ; k < cascade->stage_classifier[i].count-8; k+=8 )
                 {
                     stage_sum += icvEvalHidHaarClassifierAVX(
-                        cascade->stage_classifier[i].classifier+j,
+                        cascade->stage_classifier[i].classifier+k,
                         variance_norm_factor, p_offset );
                 }
             }
 #endif
-            for(; j < cascade->stage_classifier[i].count; j++ )
+            for(; k < cascade->stage_classifier[i].count; k++ )
            {
 
                stage_sum += icvEvalHidHaarClassifier(
-                    cascade->stage_classifier[i].classifier + j,
+                    cascade->stage_classifier[i].classifier + k,
                     variance_norm_factor, p_offset );
             }
 
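The second haar.cpp hunk renames the per-stage loop counter from `j` to `k`, presumably another shadowing fix: a counter named `j` is already in use in the enclosing scope of `cvRunHaarClassifierCascadeSum`, and re-declaring the name in the nested block trips the same warning set. A sketch of the general pattern, with illustrative names rather than the haar.cpp ones:

// counter_rename_demo.cpp -- illustrative only
int sum_stages(int count)
{
    int j = 0;                  // counter already live in the enclosing scope
    int total = 0;
    for( ; j < count; j++ )
    {
        int k = 0;              // was "int j = 0;" -- that declaration would shadow the outer j
        for( ; k < j; k++ )
        {
            total += k;         // per-stage accumulation stands in for the classifier sum
        }
    }
    return total;
}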
@@ -50,7 +50,7 @@ using namespace std;
 
 ///////////////////////
 // Functions
-void read_imgList(const string& filename, vector<Mat>& images) {
+static void read_imgList(const string& filename, vector<Mat>& images) {
     std::ifstream file(filename.c_str(), ifstream::in);
     if (!file) {
         string error_message = "No valid input file was given, please check the given filename.";
@@ -62,7 +62,7 @@ void read_imgList(const string& filename, vector<Mat>& images) {
     }
 }
 
-Mat formatImagesForPCA(const vector<Mat> &data)
+static Mat formatImagesForPCA(const vector<Mat> &data)
 {
     Mat dst(data.size(), data[0].rows*data[0].cols, CV_32F);
     for(unsigned int i = 0; i < data.size(); i++)
@@ -74,7 +74,7 @@ Mat formatImagesForPCA(const vector<Mat> &data)
     return dst;
 }
 
-Mat toGrayscale(InputArray _src) {
+static Mat toGrayscale(InputArray _src) {
     Mat src = _src.getMat();
     // only allow one channel
     if(src.channels() != 1) {
@@ -95,7 +95,7 @@ struct params
     string winName;
 };
 
-void onTrackbar(int pos, void* ptr)
+static void onTrackbar(int pos, void* ptr)
 {
     cout << "Retained Variance = " << pos << "% ";
     cout << "re-calculating PCA..." << std::flush;
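The sample-code hunks add `static` to helper functions that are only used inside one .cpp file. This is the usual way to satisfy warnings such as GCC's -Wmissing-declarations or clang's -Wmissing-prototypes, which fire for externally visible functions that have no prior declaration in a header; giving the helpers internal linkage makes the warning moot. A minimal sketch with a made-up helper name and assumed flags:

// demo.cpp -- compile with e.g. g++ -Wmissing-declarations -c demo.cpp
#include <iostream>

// Without "static" this externally visible helper has no declaration in any
// header, so -Wmissing-declarations warns; internal linkage silences it.
static void print_banner(const char* name)
{
    std::cout << "running sample: " << name << std::endl;
}

int main()
{
    print_banner("pca_demo");
    return 0;
}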