Small enhancements (adding const, etc.)

Alexander Zaitsev 2018-05-19 23:07:28 +03:00
parent 6f4e195489
commit 96f8f853c8
10 changed files with 51 additions and 63 deletions
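For orientation, a minimal before/after sketch of the patterns this commit applies (hypothetical names, not code from the repository): C headers replaced by their C++ counterparts, loop counters declared inside the for statement, const added to locals that are never reassigned, and "if (!f()) return false; return true;" collapsed into a direct return of the call.

// Illustrative sketch only; SumOfDistances and WriteBlock are made-up names,
// the patterns mirror the changes in this diff.
#include <cmath>    // was <math.h>
#include <cstdio>   // was <stdio.h>

double SumOfDistances(const double* xs, const double* ys, int n) {
  double total = 0.0;
  for (int i = 0; i < n; i++) {   // was: int i; ... for (i = 0; i < n; i++)
    const double xd = xs[i];      // was: double xd = xs[i];
    const double yd = ys[i];
    total += std::sqrt(xd * xd + yd * yd);
  }
  return total;
}

bool WriteBlock(const char* data, size_t len, FILE* fp) {
  // was: if (fwrite(...) != len) return false; return true;
  return fwrite(data, 1, len, fp) == len;
}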

View File

@@ -25,9 +25,9 @@
#include "classify.h"
#ifdef __UNIX__
#include <assert.h>
#include <cassert>
#endif
#include <stdio.h>
#include <cstdio>
using tesseract::TFile;
@@ -110,7 +110,6 @@ void FreePermConfig(PERM_CONFIG Config) {
*/
ADAPT_CLASS NewAdaptedClass() {
ADAPT_CLASS Class;
int i;
Class = (ADAPT_CLASS) Emalloc (sizeof (ADAPT_CLASS_STRUCT));
Class->NumPermConfigs = 0;
@@ -122,7 +121,7 @@ ADAPT_CLASS NewAdaptedClass() {
zero_all_bits (Class->PermProtos, WordsInVectorOfSize (MAX_NUM_PROTOS));
zero_all_bits (Class->PermConfigs, WordsInVectorOfSize (MAX_NUM_CONFIGS));
for (i = 0; i < MAX_NUM_CONFIGS; i++)
for (int i = 0; i < MAX_NUM_CONFIGS; i++)
TempConfigFor (Class, i) = nullptr;
return (Class);
@@ -132,9 +131,7 @@ ADAPT_CLASS NewAdaptedClass() {
/*-------------------------------------------------------------------------*/
void free_adapted_class(ADAPT_CLASS adapt_class) {
int i;
for (i = 0; i < MAX_NUM_CONFIGS; i++) {
for (int i = 0; i < MAX_NUM_CONFIGS; i++) {
if (ConfigIsPermanent (adapt_class, i)
&& PermConfigFor (adapt_class, i) != nullptr)
FreePermConfig (PermConfigFor (adapt_class, i));
@@ -164,7 +161,6 @@ namespace tesseract {
*/
ADAPT_TEMPLATES Classify::NewAdaptedTemplates(bool InitFromUnicharset) {
ADAPT_TEMPLATES Templates;
int i;
Templates = (ADAPT_TEMPLATES) Emalloc (sizeof (ADAPT_TEMPLATES_STRUCT));
@@ -173,7 +169,7 @@ ADAPT_TEMPLATES Classify::NewAdaptedTemplates(bool InitFromUnicharset) {
Templates->NumNonEmptyClasses = 0;
/* Insert an empty class for each unichar id in unicharset */
for (i = 0; i < MAX_NUM_CLASSES; i++) {
for (int i = 0; i < MAX_NUM_CLASSES; i++) {
Templates->Class[i] = nullptr;
if (InitFromUnicharset && i < unicharset.size()) {
AddAdaptedClass(Templates, NewAdaptedClass(), i);
@@ -197,8 +193,7 @@ int Classify::GetFontinfoId(ADAPT_CLASS Class, uint8_t ConfigId) {
void free_adapted_templates(ADAPT_TEMPLATES templates) {
if (templates != nullptr) {
int i;
for (i = 0; i < (templates->Templates)->NumClasses; i++)
for (int i = 0; i < (templates->Templates)->NumClasses; i++)
free_adapted_class (templates->Class[i]);
free_int_templates (templates->Templates);
Efree(templates);
@@ -264,7 +259,6 @@ namespace tesseract {
* @note History: Wed Mar 20 13:35:29 1991, DSJ, Created.
*/
void Classify::PrintAdaptedTemplates(FILE *File, ADAPT_TEMPLATES Templates) {
int i;
INT_CLASS IClass;
ADAPT_CLASS AClass;
@@ -274,7 +268,7 @@ void Classify::PrintAdaptedTemplates(FILE *File, ADAPT_TEMPLATES Templates) {
fprintf (File, " Id NC NPC NP NPP\n");
fprintf (File, "------------------------\n");
for (i = 0; i < (Templates->Templates)->NumClasses; i++) {
for (int i = 0; i < (Templates->Templates)->NumClasses; i++) {
IClass = Templates->Templates->Class[i];
AClass = Templates->Class[i];
if (!IsEmptyAdaptedClass (AClass)) {
@@ -357,7 +351,6 @@ namespace tesseract {
* @note History: Mon Mar 18 15:18:10 1991, DSJ, Created.
*/
ADAPT_TEMPLATES Classify::ReadAdaptedTemplates(TFile *fp) {
int i;
ADAPT_TEMPLATES Templates;
/* first read the high level adaptive template struct */
@@ -368,7 +361,7 @@ ADAPT_TEMPLATES Classify::ReadAdaptedTemplates(TFile *fp) {
Templates->Templates = ReadIntTemplates(fp);
/* then read in the adaptive info for each class */
for (i = 0; i < (Templates->Templates)->NumClasses; i++) {
for (int i = 0; i < (Templates->Templates)->NumClasses; i++) {
Templates->Class[i] = ReadAdaptedClass(fp);
}
return (Templates);

View File

@@ -21,7 +21,7 @@
----------------------------------------------------------------------------**/
#include "blobclass.h"
#include <stdio.h>
#include <cstdio>
#include "classify.h"
#include "efio.h"
@@ -97,8 +97,8 @@ void Classify::LearnBlob(const STRING& fontname, TBLOB* blob,
bool Classify::WriteTRFile(const STRING& filename) {
STRING tr_filename = filename + ".tr";
FILE* fp = Efopen(tr_filename.string(), "wb");
size_t len = tr_file_data_.length();
bool result =
const size_t len = tr_file_data_.length();
const bool result =
fwrite(&tr_file_data_[0], sizeof(tr_file_data_[0]), len, fp) == len;
fclose(fp);
tr_file_data_.truncate_at(0);

View File

@@ -41,7 +41,7 @@ double ErrorCounter::ComputeErrorRate(ShapeClassifier* classifier,
const FontInfoTable& fontinfo_table,
const GenericVector<Pix*>& page_images, SampleIterator* it,
double* unichar_error, double* scaled_error, STRING* fonts_report) {
int fontsize = it->sample_set()->NumFonts();
const int fontsize = it->sample_set()->NumFonts();
ErrorCounter counter(classifier->GetUnicharset(), fontsize);
GenericVector<UnicharRating> results;
@@ -83,7 +83,7 @@ double ErrorCounter::ComputeErrorRate(ShapeClassifier* classifier,
}
++total_samples;
}
double total_time = 1.0 * (clock() - start) / CLOCKS_PER_SEC;
const double total_time = 1.0 * (clock() - start) / CLOCKS_PER_SEC;
// Create the appropriate error report.
unscaled_error = counter.ReportErrors(report_level, boosting_mode,
fontinfo_table,
@@ -303,9 +303,9 @@ bool ErrorCounter::AccumulateJunk(bool debug,
TrainingSample* sample) {
// For junk we accept no answer, or an explicit shape answer matching the
// class id of the sample.
int num_results = results.size();
int font_id = sample->font_id();
int unichar_id = sample->class_id();
const int num_results = results.size();
const int font_id = sample->font_id();
const int unichar_id = sample->class_id();
int percent = 0;
if (num_results > 0)
percent = IntCastRounded(results[0].rating * 100);
@@ -446,7 +446,7 @@ bool ErrorCounter::ReportString(bool even_if_empty, const Counts& counts,
"FontAttr=%.4g%%, Multi=%.4g%%, "
"Answers=%.3g, Rank=%.3g, "
"OKjunk=%.4g%%, Badjunk=%.4g%%";
int max_str_len = strlen(format_str) + kMaxExtraLength * (CT_SIZE - 1) + 1;
const size_t max_str_len = strlen(format_str) + kMaxExtraLength * (CT_SIZE - 1) + 1;
char* formatted_str = new char[max_str_len];
snprintf(formatted_str, max_str_len, format_str,
rates[CT_UNICHAR_TOP1_ERR] * 100.0,
@@ -475,9 +475,9 @@ bool ErrorCounter::ReportString(bool even_if_empty, const Counts& counts,
// Computes the error rates and returns in rates which is an array of size
// CT_SIZE. Returns false if there is no data, leaving rates unchanged.
bool ErrorCounter::ComputeRates(const Counts& counts, double rates[CT_SIZE]) {
int ok_samples = counts.n[CT_UNICHAR_TOP_OK] + counts.n[CT_UNICHAR_TOP1_ERR] +
const int ok_samples = counts.n[CT_UNICHAR_TOP_OK] + counts.n[CT_UNICHAR_TOP1_ERR] +
counts.n[CT_REJECT];
int junk_samples = counts.n[CT_REJECTED_JUNK] + counts.n[CT_ACCEPTED_JUNK];
const int junk_samples = counts.n[CT_REJECTED_JUNK] + counts.n[CT_ACCEPTED_JUNK];
// Compute rates for normal chars.
double denominator = static_cast<double>(MAX(ok_samples, 1));
for (int ct = 0; ct <= CT_RANK; ++ct)

View File

@@ -99,7 +99,6 @@ void Classify::ComputeIntCharNormArray(const FEATURE_STRUCT& norm_feature,
*/
void Classify::ComputeIntFeatures(FEATURE_SET Features,
INT_FEATURE_ARRAY IntFeatures) {
int Fid;
FEATURE Feature;
FLOAT32 YShift;
@@ -108,7 +107,7 @@ void Classify::ComputeIntFeatures(FEATURE_SET Features,
else
YShift = Y_SHIFT;
for (Fid = 0; Fid < Features->NumFeatures; Fid++) {
for (int Fid = 0; Fid < Features->NumFeatures; Fid++) {
Feature = Features->Features[Fid];
IntFeatures[Fid].X =

View File

@@ -20,16 +20,16 @@
----------------------------------------------------------------------------*/
#include "const.h"
#include "fpoint.h"
#include <stdio.h>
#include <math.h>
#include <cstdio>
#include <cmath>
/*----------------------------------------------------------------------------
Public Code
----------------------------------------------------------------------------*/
FLOAT32 DistanceBetween(FPOINT A, FPOINT B) {
double xd = XDelta(A, B);
double yd = YDelta(A, B);
const double xd = XDelta(A, B);
const double yd = YDelta(A, B);
return sqrt(static_cast<double>(xd * xd + yd * yd));
}

View File

@@ -52,16 +52,16 @@ void IntFeatureDist::Set(const GenericVector<int>& indexed_features,
int canonical_count, bool value) {
total_feature_weight_ = canonical_count;
for (int i = 0; i < indexed_features.size(); ++i) {
int f = indexed_features[i];
const int f = indexed_features[i];
features_[f] = value;
for (int dir = -kNumOffsetMaps; dir <= kNumOffsetMaps; ++dir) {
if (dir == 0) continue;
int mapped_f = feature_map_->OffsetFeature(f, dir);
const int mapped_f = feature_map_->OffsetFeature(f, dir);
if (mapped_f >= 0) {
features_delta_one_[mapped_f] = value;
for (int dir2 = -kNumOffsetMaps; dir2 <= kNumOffsetMaps; ++dir2) {
if (dir2 == 0) continue;
int mapped_f2 = feature_map_->OffsetFeature(mapped_f, dir2);
const int mapped_f2 = feature_map_->OffsetFeature(mapped_f, dir2);
if (mapped_f2 >= 0)
features_delta_two_[mapped_f2] = value;
}
@@ -74,12 +74,12 @@ void IntFeatureDist::Set(const GenericVector<int>& indexed_features,
// Set feature vector.
double IntFeatureDist::FeatureDistance(
const GenericVector<int>& features) const {
int num_test_features = features.size();
double denominator = total_feature_weight_ + num_test_features;
const int num_test_features = features.size();
const double denominator = total_feature_weight_ + num_test_features;
double misses = denominator;
for (int i = 0; i < num_test_features; ++i) {
int index = features[i];
double weight = 1.0;
const int index = features[i];
const double weight = 1.0;
if (features_[index]) {
// A perfect match.
misses -= 2.0 * weight;
@@ -97,12 +97,12 @@ double IntFeatureDist::FeatureDistance(
// Set feature vector.
double IntFeatureDist::DebugFeatureDistance(
const GenericVector<int>& features) const {
int num_test_features = features.size();
double denominator = total_feature_weight_ + num_test_features;
const int num_test_features = features.size();
const double denominator = total_feature_weight_ + num_test_features;
double misses = denominator;
for (int i = 0; i < num_test_features; ++i) {
int index = features[i];
double weight = 1.0;
const int index = features[i];
const double weight = 1.0;
INT_FEATURE_STRUCT f = feature_map_->InverseMapFeature(features[i]);
tprintf("Testing feature weight %g:", weight);
f.print();

View File

@@ -40,7 +40,7 @@ int ShapeRating::FirstResultWithUnichar(
const ShapeTable& shape_table,
UNICHAR_ID unichar_id) {
for (int r = 0; r < results.size(); ++r) {
int shape_id = results[r].shape_id;
const int shape_id = results[r].shape_id;
const Shape& shape = shape_table.GetShape(shape_id);
if (shape.ContainsUnichar(unichar_id)) {
return r;
@@ -66,15 +66,13 @@ int UnicharRating::FirstResultWithUnichar(
// Writes to the given file. Returns false in case of error.
bool UnicharAndFonts::Serialize(FILE* fp) const {
if (fwrite(&unichar_id, sizeof(unichar_id), 1, fp) != 1) return false;
if (!font_ids.Serialize(fp)) return false;
return true;
return font_ids.Serialize(fp);
}
// Reads from the given file. Returns false in case of error.
bool UnicharAndFonts::DeSerialize(TFile* fp) {
if (fp->FReadEndian(&unichar_id, sizeof(unichar_id), 1) != 1) return false;
if (!font_ids.DeSerialize(fp)) return false;
return true;
return font_ids.DeSerialize(fp);
}
// Sort function to sort a pair of UnicharAndFonts by unichar_id.
@@ -89,8 +87,7 @@ bool Shape::Serialize(FILE* fp) const {
uint8_t sorted = unichars_sorted_;
if (fwrite(&sorted, sizeof(sorted), 1, fp) != 1)
return false;
if (!unichars_.SerializeClasses(fp)) return false;
return true;
return unichars_.SerializeClasses(fp);
}
// Reads from the given file. Returns false in case of error.
@@ -244,8 +241,7 @@ ShapeTable::ShapeTable(const UNICHARSET& unicharset)
// Writes to the given file. Returns false in case of error.
bool ShapeTable::Serialize(FILE* fp) const {
if (!shape_table_.Serialize(fp)) return false;
return true;
return shape_table_.Serialize(fp);
}
// Reads from the given file. Returns false in case of error.

View File

@@ -31,9 +31,9 @@ namespace tesseract {
int TessClassifier::UnicharClassifySample(
const TrainingSample& sample, Pix* page_pix, int debug,
UNICHAR_ID keep_this, GenericVector<UnicharRating>* results) {
int old_matcher_level = classify_->matcher_debug_level;
int old_matcher_flags = classify_->matcher_debug_flags;
int old_classify_level = classify_->classify_debug_level;
const int old_matcher_level = classify_->matcher_debug_level;
const int old_matcher_flags = classify_->matcher_debug_flags;
const int old_classify_level = classify_->classify_debug_level;
if (debug) {
// Explicitly set values of various control parameters to generate debug
// output if required, restoring the old values after classifying.

View File

@@ -20,7 +20,7 @@
#include "trainingsample.h"
#include <math.h>
#include <cmath>
#include "allheaders.h"
#include "helpers.h"
#include "intfeaturemap.h"
@@ -163,7 +163,7 @@ TrainingSample* TrainingSample::RandomizedCopy(int index) const {
TrainingSample* sample = Copy();
if (index >= 0 && index < kSampleRandomSize) {
++index; // Remove the first combination.
int yshift = kYShiftValues[index / kSampleScaleSize];
const int yshift = kYShiftValues[index / kSampleScaleSize];
double scaling = kScaleValues[index % kSampleScaleSize];
for (int i = 0; i < num_features_; ++i) {
double result = (features_[i].X - kRandomizingCenter) * scaling;

View File

@@ -426,7 +426,7 @@ int TrainingSampleSet::ReliablySeparable(int font_id1, int class_id1,
// Find a canonical2 feature that is not in cloud1.
for (int f = 0; f < canonical2.size(); ++f) {
int feature = canonical2[f];
const int feature = canonical2[f];
if (cloud1[feature])
continue;
// Gather the near neighbours of f.
@@ -464,7 +464,7 @@ const TrainingSample* TrainingSampleSet::GetCanonicalSample(
ASSERT_HOST(font_class_array_ != nullptr);
int font_index = font_id_map_.SparseToCompact(font_id);
if (font_index < 0) return nullptr;
int sample_index = (*font_class_array_)(font_index,
const int sample_index = (*font_class_array_)(font_index,
class_id).canonical_sample;
return sample_index >= 0 ? samples_[sample_index] : nullptr;
}
@@ -549,7 +549,7 @@ void TrainingSampleSet::SetupFontIdMap() {
// Number of samples for each font_id.
GenericVector<int> font_counts;
for (int s = 0; s < samples_.size(); ++s) {
int font_id = samples_[s]->font_id();
const int font_id = samples_[s]->font_id();
while (font_id >= font_counts.size())
font_counts.push_back(0);
++font_counts[font_id];
@@ -692,9 +692,9 @@ void TrainingSampleSet::ReplicateAndRandomizeSamples() {
// canonical features to those that truly represent all samples.
void TrainingSampleSet::ComputeCanonicalFeatures() {
ASSERT_HOST(font_class_array_ != nullptr);
int font_size = font_id_map_.CompactSize();
const int font_size = font_id_map_.CompactSize();
for (int font_index = 0; font_index < font_size; ++font_index) {
int font_id = font_id_map_.CompactToSparse(font_index);
const int font_id = font_id_map_.CompactToSparse(font_index);
for (int c = 0; c < unicharset_size_; ++c) {
int num_samples = NumClassSamples(font_id, c, false);
if (num_samples == 0)
@@ -732,7 +732,7 @@ void TrainingSampleSet::ComputeCloudFeatures(int feature_space_size) {
// Adds all fonts of the given class to the shape.
void TrainingSampleSet::AddAllFontsForClass(int class_id, Shape* shape) const {
for (int f = 0; f < font_id_map_.CompactSize(); ++f) {
int font_id = font_id_map_.CompactToSparse(f);
const int font_id = font_id_map_.CompactToSparse(f);
shape->AddToShape(class_id, font_id);
}
}