NOP changes from static analysis in issue 1205

Ray Smith 2014-08-12 16:09:12 -07:00
parent dbf6197471
commit 736d327473
6 changed files with 13 additions and 11 deletions

View File

@@ -451,8 +451,8 @@ void DENORM::XHeightRange(int unichar_id, const UNICHARSET& unicharset,
  &min_top, &max_top);
  // Calculate the scale factor we'll use to get to image y-pixels
- double midx = (bbox.left() + bbox.right()) / 2;
- double ydiff = (bbox.top() - bbox.bottom()) + 2;
+ double midx = (bbox.left() + bbox.right()) / 2.0;
+ double ydiff = (bbox.top() - bbox.bottom()) + 2.0;
  FCOORD mid_bot(midx, bbox.bottom()), tmid_bot;
  FCOORD mid_high(midx, bbox.bottom() + ydiff), tmid_high;
  DenormTransform(NULL, mid_bot, &tmid_bot);
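
In the removed lines, bbox.left(), bbox.right(), bbox.top() and bbox.bottom() return integer pixel coordinates, so "(bbox.left() + bbox.right()) / 2" is integer division and drops the half pixel before the result is widened to double; dividing by 2.0 keeps it. The difference is at most half a pixel, which is why it still reads as a NOP-level cleanup. A minimal standalone sketch (illustrative values only, not taken from the code):

#include <cstdio>

int main() {
  int left = 10, right = 15;                // stand-ins for integer bbox coordinates
  double mid_trunc = (left + right) / 2;    // integer division happens first: 12.0
  double mid_exact = (left + right) / 2.0;  // operands promoted to double: 12.5
  printf("%.1f vs %.1f\n", mid_trunc, mid_exact);
  return 0;
}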

View File

@@ -58,7 +58,6 @@ const int par2 = 6750 / (approx_dist * approx_dist);
  TESSLINE* ApproximateOutline(bool allow_detailed_fx, C_OUTLINE* c_outline) {
- EDGEPT *edgept; // converted steps
  TBOX loop_box; // bounding box
  inT32 area; // loop area
  EDGEPT stack_edgepts[FASTEDGELENGTH]; // converted path
@@ -73,9 +72,9 @@ TESSLINE* ApproximateOutline(bool allow_detailed_fx, C_OUTLINE* c_outline) {
  if (!poly_wide_objects_better && loop_box.width() > area)
  area = loop_box.width();
  area *= area;
- edgept = edgesteps_to_edgepts(c_outline, edgepts);
+ edgesteps_to_edgepts(c_outline, edgepts);
  fix2(edgepts, area);
- edgept = poly2 (edgepts, area); // 2nd approximation.
+ EDGEPT* edgept = poly2(edgepts, area); // 2nd approximation.
  EDGEPT* startpt = edgept;
  EDGEPT* result = NULL;
  EDGEPT* prev_result = NULL;
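
Both edits in this file silence dead-store warnings: the return of edgesteps_to_edgepts() was assigned to edgept but never read before the next assignment, so the result can be discarded and edgept declared where it first receives a value that is actually used. A hedged sketch of the pattern with hypothetical stand-in functions (not the real Tesseract APIs):

// first_pass()/second_pass() are made-up stand-ins for the two calls above.
static int first_pass()  { return 1; }
static int second_pass() { return 2; }

int main() {
  // Before: 'p' is written and then overwritten without being read in
  // between, which is the dead store a static analyser reports.
  int p = first_pass();
  p = second_pass();

  // After: discard the unused result, declare at the point of first real use.
  first_pass();
  int q = second_pass();
  return p + q;
}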

View File

@@ -184,8 +184,8 @@ void Dawg::init(DawgType type, const STRING &lang,
  // Set bit masks.
  flag_start_bit_ = ceil(log(static_cast<double>(unicharset_size_)) / log(2.0));
  next_node_start_bit_ = flag_start_bit_ + NUM_FLAG_BITS;
- letter_mask_ = ~(~0 << flag_start_bit_);
- next_node_mask_ = ~0 << (flag_start_bit_ + NUM_FLAG_BITS);
+ letter_mask_ = ~(~0ull << flag_start_bit_);
+ next_node_mask_ = ~0ull << (flag_start_bit_ + NUM_FLAG_BITS);
  flags_mask_ = ~(letter_mask_ | next_node_mask_);
  debug_level_ = debug_level;
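
~0 is a plain signed int (-1): left-shifting a negative value is undefined behaviour in pre-C++20 dialects, and a 32-bit int could never fill a 64-bit mask anyway. ~0ull makes the shift operate on an unsigned 64-bit value. A standalone sketch (the bit position is an illustrative number, not one taken from the Dawg code):

#include <cstdint>
#include <cstdio>

int main() {
  const unsigned flag_start_bit = 40;  // illustrative; big enough to need 64 bits

  // "~0 << flag_start_bit" would shift a negative 32-bit int: undefined
  // behaviour, and in any case unable to reach bits 32..63.
  // "~0ull" is an unsigned 64-bit value, so the shift is well defined.
  uint64_t letter_mask    = ~(~0ull << flag_start_bit);  // bits 0..39 set
  uint64_t next_node_mask =   ~0ull << flag_start_bit;   // bits 40..63 set

  printf("%016llx %016llx\n", (unsigned long long)letter_mask,
         (unsigned long long)next_node_mask);
  return 0;
}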

View File

@@ -732,7 +732,7 @@ void TabVector::Evaluate(const ICOORD& vertical, TabFind* finder) {
  gutter_width, median_gutter);
  }
  it.extract();
- ++num_deleted_boxes = true;
+ ++num_deleted_boxes;
  }
  }
  }
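
The old statement pre-increments the counter and then immediately assigns true (i.e. 1) back to it, so num_deleted_boxes could never get past 1; the fixed form counts normally. If the value is only ever tested for being non-zero afterwards (which would fit the "NOP changes" title), the observable behaviour is unchanged. A small illustration:

#include <cstdio>

int main() {
  int n = 0;
  ++n = true;                    // increment, then overwrite with 1: pinned at 1
  ++n = true;
  printf("old form: %d\n", n);   // prints 1

  n = 0;
  ++n;                           // the fixed form actually accumulates
  ++n;
  printf("new form: %d\n", n);   // prints 2
  return 0;
}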

View File

@@ -394,7 +394,6 @@ inT32 row_words2( //compute space size
  if (!blob->joined_to_prev ()) {
  blob_box = blob->bounding_box ();
- this_valid = blob_box.width () >= min_width;
  this_valid = TRUE;
  if (this_valid && prev_valid
  && blob_box.left () - prev_x < maxwidth) {
  gap_stats.add (blob_box.left () - prev_x, 1);
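
Assuming the width test is the deleted line (the hunk removes exactly one line, and that test is immediately overwritten by "this_valid = TRUE;"), dropping it cannot change behaviour: only the unconditional assignment ever reaches the if below. A two-assignment illustration with made-up values:

#include <cstdio>

int main() {
  int width = 3, min_width = 5;
  bool this_valid = width >= min_width;  // dead store: never read
  this_valid = true;                     // the value actually used below
  printf("%d\n", this_valid);            // prints 1 with or without the dead store
  return 0;
}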

View File

@@ -1161,8 +1161,12 @@ void LanguageModel::FillConsistencyInfo(
  float actual_gap =
  static_cast<float>(word_res->GetBlobsGap(curr_col-1));
  float gap_ratio = expected_gap / actual_gap;
- // TODO(daria): find a good way to tune this heuristic estimate.
- if (gap_ratio < 1/2 || gap_ratio > 2) {
+ // TODO(rays) The gaps seem to be way off most of the time, saved by
+ // the error here that the ratio was compared to 1/2, when it should
+ // have been 0.5f. Find the source of the gaps discrepancy and put
+ // the 0.5f here in place of 0.0f.
+ // Test on 2476595.sj, pages 0 to 6. (In French.)
+ if (gap_ratio < 0.0f || gap_ratio > 2.0f) {
  consistency_info->num_inconsistent_spaces++;
  }
  if (language_model_debug_level > 1) {
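
The subtle point, spelled out in the new TODO, is that the old threshold 1/2 is integer division and evaluates to 0, so for any non-negative gap_ratio the first test never fired; writing 0.0f keeps that accidental behaviour explicit instead of silently switching to the probably intended 0.5f. A quick check of the arithmetic:

#include <cstdio>

int main() {
  float gap_ratio = 0.3f;             // would trip a genuine 0.5 threshold
  printf("%d\n", 1 / 2);              // integer division: prints 0
  printf("%d\n", gap_ratio < 1 / 2);  // effectively "< 0": prints 0 (false)
  printf("%d\n", gap_ratio < 0.0f);   // the new explicit form: also 0 (false)
  printf("%d\n", gap_ratio < 0.5f);   // what was probably intended: prints 1 (true)
  return 0;
}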