* 'master' of https://github.com/tesseract-ocr/tesseract:
  Fix CID 1164579 (Explicit null dereferenced)
  print help for tesstrain.sh; fixes #1469
  Fix CID 1395882 (Uninitialized scalar variable)
  Fix comments
  Move content of ipoints.h to points.h and remove ipoints.h
  remove duplicate help from combine_lang_model
  Fix typo.
  use tprintf instead of printf to be able to disable messages with the quiet option (issue #1240)
  add "sudo ldconfig" to install instructions; fixes #1212
  unittest: Replace NULL by nullptr
  unittest: Format code
  tesseract app: check if input file exists; fixes #1023
  Format code (replace ( xxx ) by (xxx))
  Simplify boolean expressions
  Win32: use the ISO C and C++ conformant name "_putenv" instead of deprecated "putenv"
Zdenko Podobný 2018-10-03 19:21:42 +02:00
commit dcc50a867f
94 changed files with 2430 additions and 2588 deletions
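Of the commits listed above, the Win32 change ("use the ISO C and C++ conformant name "_putenv" instead of deprecated "putenv"") is one whose hunk is not visible on this page (it is presumably in a diff not shown here). A minimal, self-contained sketch of that kind of substitution follows; the helper function and environment variable are hypothetical, only the putenv/_putenv names come from the commit subject.

// Sketch only (not the actual hunk): swap the deprecated POSIX-style putenv()
// for the ISO C/C++ conformant _putenv() when building with MSVC.
#include <cstdio>
#include <cstdlib>

static void set_env_example() {
#if defined(_WIN32)
  // MSVC flags putenv() as deprecated; _putenv() in <stdlib.h> is the
  // conformant spelling and takes the same "NAME=value" string.
  _putenv("EXAMPLE_VAR=1");
#else
  // POSIX keeps the traditional name. putenv() may retain the pointer,
  // so hand it storage with static duration.
  static char assignment[] = "EXAMPLE_VAR=1";
  putenv(assignment);
#endif
  std::printf("EXAMPLE_VAR=%s\n", std::getenv("EXAMPLE_VAR"));
}

int main() {
  set_env_example();
  return 0;
}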


@ -500,6 +500,7 @@ echo "You can now build and install $PACKAGE_NAME by running:"
echo ""
echo "$ make"
echo "$ sudo make install"
echo "$ sudo ldconfig"
echo ""
AM_COND_IF([ASCIIDOC],


@ -413,9 +413,9 @@ TESS_API struct Boxa* TESS_CALL TessBaseAPIGetComponentImages(TessBaseAPI* handl
}
TESS_API struct Boxa*
TESS_CALL TessBaseAPIGetComponentImages1( TessBaseAPI* handle, const TessPageIteratorLevel level, const BOOL text_only,
const BOOL raw_image, const int raw_padding,
struct Pixa** pixa, int** blockids, int** paraids)
TESS_CALL TessBaseAPIGetComponentImages1(TessBaseAPI* handle, const TessPageIteratorLevel level, const BOOL text_only,
const BOOL raw_image, const int raw_padding,
struct Pixa** pixa, int** blockids, int** paraids)
{
return handle->GetComponentImages(level, text_only != FALSE, raw_image, raw_padding, pixa, blockids, paraids);
}
@ -825,37 +825,37 @@ TESS_API ETEXT_DESC* TESS_CALL TessMonitorCreate()
return new ETEXT_DESC();
}
TESS_API void TESS_CALL TessMonitorDelete( ETEXT_DESC* monitor )
TESS_API void TESS_CALL TessMonitorDelete(ETEXT_DESC* monitor)
{
delete monitor;
}
TESS_API void TESS_CALL TessMonitorSetCancelFunc( ETEXT_DESC* monitor, TessCancelFunc cancelFunc )
TESS_API void TESS_CALL TessMonitorSetCancelFunc(ETEXT_DESC* monitor, TessCancelFunc cancelFunc)
{
monitor->cancel = cancelFunc;
}
TESS_API void TESS_CALL TessMonitorSetCancelThis( ETEXT_DESC* monitor, void* cancelThis )
TESS_API void TESS_CALL TessMonitorSetCancelThis(ETEXT_DESC* monitor, void* cancelThis)
{
monitor->cancel_this = cancelThis;
}
TESS_API void* TESS_CALL TessMonitorGetCancelThis( ETEXT_DESC* monitor )
TESS_API void* TESS_CALL TessMonitorGetCancelThis(ETEXT_DESC* monitor)
{
return monitor->cancel_this;
}
TESS_API void TESS_CALL TessMonitorSetProgressFunc( ETEXT_DESC* monitor, TessProgressFunc progressFunc )
TESS_API void TESS_CALL TessMonitorSetProgressFunc(ETEXT_DESC* monitor, TessProgressFunc progressFunc)
{
monitor->progress_callback2 = progressFunc;
}
TESS_API int TESS_CALL TessMonitorGetProgress( ETEXT_DESC* monitor )
TESS_API int TESS_CALL TessMonitorGetProgress(ETEXT_DESC* monitor)
{
return monitor->progress;
}
TESS_API void TESS_CALL TessMonitorSetDeadlineMSecs( ETEXT_DESC* monitor, int deadline )
TESS_API void TESS_CALL TessMonitorSetDeadlineMSecs(ETEXT_DESC* monitor, int deadline)
{
monitor->set_deadline_msecs( deadline );
monitor->set_deadline_msecs(deadline);
}


@ -151,7 +151,7 @@ TESS_API void TESS_CALL TessBaseAPIDelete(TessBaseAPI* handle);
TESS_API size_t TESS_CALL TessBaseAPIGetOpenCLDevice(TessBaseAPI* handle, void **device);
TESS_API void TESS_CALL TessBaseAPISetInputName( TessBaseAPI* handle, const char* name);
TESS_API void TESS_CALL TessBaseAPISetInputName(TessBaseAPI* handle, const char* name);
TESS_API const char* TESS_CALL TessBaseAPIGetInputName(TessBaseAPI* handle);
TESS_API void TESS_CALL TessBaseAPISetInputImage(TessBaseAPI* handle, struct Pix* pix);
@ -165,13 +165,13 @@ TESS_API void TESS_CALL TessBaseAPISetOutputName(TessBaseAPI* handle, const cha
TESS_API BOOL TESS_CALL TessBaseAPISetVariable(TessBaseAPI* handle, const char* name, const char* value);
TESS_API BOOL TESS_CALL TessBaseAPISetDebugVariable(TessBaseAPI* handle, const char* name, const char* value);
TESS_API BOOL TESS_CALL TessBaseAPIGetIntVariable( const TessBaseAPI* handle, const char* name, int* value);
TESS_API BOOL TESS_CALL TessBaseAPIGetBoolVariable( const TessBaseAPI* handle, const char* name, BOOL* value);
TESS_API BOOL TESS_CALL TessBaseAPIGetIntVariable(const TessBaseAPI* handle, const char* name, int* value);
TESS_API BOOL TESS_CALL TessBaseAPIGetBoolVariable(const TessBaseAPI* handle, const char* name, BOOL* value);
TESS_API BOOL TESS_CALL TessBaseAPIGetDoubleVariable(const TessBaseAPI* handle, const char* name, double* value);
TESS_API const char*
TESS_CALL TessBaseAPIGetStringVariable(const TessBaseAPI* handle, const char* name);
TESS_API void TESS_CALL TessBaseAPIPrintVariables( const TessBaseAPI* handle, FILE* fp);
TESS_API void TESS_CALL TessBaseAPIPrintVariables(const TessBaseAPI* handle, FILE* fp);
TESS_API BOOL TESS_CALL TessBaseAPIPrintVariablesToFile(const TessBaseAPI* handle, const char* filename);
#ifdef TESS_CAPI_INCLUDE_BASEAPI
@ -231,27 +231,27 @@ TESS_API void TESS_CALL TessBaseAPISetThresholder(TessBaseAPI* handle, TessImag
#endif
TESS_API struct Pix*
TESS_CALL TessBaseAPIGetThresholdedImage( TessBaseAPI* handle);
TESS_CALL TessBaseAPIGetThresholdedImage(TessBaseAPI* handle);
TESS_API struct Boxa*
TESS_CALL TessBaseAPIGetRegions( TessBaseAPI* handle, struct Pixa** pixa);
TESS_CALL TessBaseAPIGetRegions(TessBaseAPI* handle, struct Pixa** pixa);
TESS_API struct Boxa*
TESS_CALL TessBaseAPIGetTextlines( TessBaseAPI* handle, struct Pixa** pixa, int** blockids);
TESS_CALL TessBaseAPIGetTextlines(TessBaseAPI* handle, struct Pixa** pixa, int** blockids);
TESS_API struct Boxa*
TESS_CALL TessBaseAPIGetTextlines1( TessBaseAPI* handle, const BOOL raw_image, const int raw_padding,
struct Pixa** pixa, int** blockids, int** paraids);
TESS_CALL TessBaseAPIGetTextlines1(TessBaseAPI* handle, const BOOL raw_image, const int raw_padding,
struct Pixa** pixa, int** blockids, int** paraids);
TESS_API struct Boxa*
TESS_CALL TessBaseAPIGetStrips( TessBaseAPI* handle, struct Pixa** pixa, int** blockids);
TESS_CALL TessBaseAPIGetStrips(TessBaseAPI* handle, struct Pixa** pixa, int** blockids);
TESS_API struct Boxa*
TESS_CALL TessBaseAPIGetWords( TessBaseAPI* handle, struct Pixa** pixa);
TESS_CALL TessBaseAPIGetWords(TessBaseAPI* handle, struct Pixa** pixa);
TESS_API struct Boxa*
TESS_CALL TessBaseAPIGetConnectedComponents(TessBaseAPI* handle, struct Pixa** cc);
TESS_API struct Boxa*
TESS_CALL TessBaseAPIGetComponentImages( TessBaseAPI* handle, const TessPageIteratorLevel level, const BOOL text_only,
struct Pixa** pixa, int** blockids);
TESS_CALL TessBaseAPIGetComponentImages(TessBaseAPI* handle, const TessPageIteratorLevel level, const BOOL text_only,
struct Pixa** pixa, int** blockids);
TESS_API struct Boxa*
TESS_CALL TessBaseAPIGetComponentImages1( TessBaseAPI* handle, const TessPageIteratorLevel level, const BOOL text_only,
const BOOL raw_image, const int raw_padding,
struct Pixa** pixa, int** blockids, int** paraids);
TESS_CALL TessBaseAPIGetComponentImages1(TessBaseAPI* handle, const TessPageIteratorLevel level, const BOOL text_only,
const BOOL raw_image, const int raw_padding,
struct Pixa** pixa, int** blockids, int** paraids);
TESS_API int TESS_CALL TessBaseAPIGetThresholdedImageScaleFactor(const TessBaseAPI* handle);
@ -405,13 +405,13 @@ TESS_API float TESS_CALL TessChoiceIteratorConfidence(const TessChoiceIterator*
/* Progress monitor */
TESS_API ETEXT_DESC* TESS_CALL TessMonitorCreate();
TESS_API void TESS_CALL TessMonitorDelete( ETEXT_DESC* monitor );
TESS_API void TESS_CALL TessMonitorSetCancelFunc( ETEXT_DESC* monitor, TessCancelFunc cancelFunc );
TESS_API void TESS_CALL TessMonitorSetCancelThis( ETEXT_DESC* monitor, void* cancelThis );
TESS_API void* TESS_CALL TessMonitorGetCancelThis( ETEXT_DESC* monitor );
TESS_API void TESS_CALL TessMonitorSetProgressFunc( ETEXT_DESC* monitor, TessProgressFunc progressFunc );
TESS_API int TESS_CALL TessMonitorGetProgress( ETEXT_DESC* monitor );
TESS_API void TESS_CALL TessMonitorSetDeadlineMSecs( ETEXT_DESC* monitor, int deadline );
TESS_API void TESS_CALL TessMonitorDelete(ETEXT_DESC* monitor);
TESS_API void TESS_CALL TessMonitorSetCancelFunc(ETEXT_DESC* monitor, TessCancelFunc cancelFunc);
TESS_API void TESS_CALL TessMonitorSetCancelThis(ETEXT_DESC* monitor, void* cancelThis);
TESS_API void* TESS_CALL TessMonitorGetCancelThis(ETEXT_DESC* monitor);
TESS_API void TESS_CALL TessMonitorSetProgressFunc(ETEXT_DESC* monitor, TessProgressFunc progressFunc);
TESS_API int TESS_CALL TessMonitorGetProgress(ETEXT_DESC* monitor);
TESS_API void TESS_CALL TessMonitorSetDeadlineMSecs(ETEXT_DESC* monitor, int deadline);
#ifndef DISABLED_LEGACY_ENGINE


@ -530,6 +530,13 @@ int main(int argc, char** argv) {
return EXIT_SUCCESS;
}
if (FILE* file = fopen(image, "r")) {
fclose(file);
} else {
fprintf(stderr, "Cannot open input file: %s\n", image);
return EXIT_FAILURE;
}
FixPageSegMode(&api, pagesegmode);
if (dpi) {
@ -537,12 +544,13 @@ int main(int argc, char** argv) {
snprintf(dpi_string, 254, "%d", dpi);
api.SetVariable("user_defined_dpi", dpi_string);
}
if (pagesegmode == tesseract::PSM_AUTO_ONLY) {
int ret_val = EXIT_SUCCESS;
Pix* pixs = pixRead(image);
if (!pixs) {
fprintf(stderr, "Cannot open input file: %s\n", image);
fprintf(stderr, "Leptonica can't process input file: %s\n", image);
return 2;
}
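The hunk above implements "tesseract app: check if input file exists; fixes #1023": the path is probed with fopen() before Leptonica is asked to decode it, so a missing file ("Cannot open input file") is reported separately from an undecodable one ("Leptonica can't process input file"). Below is a self-contained sketch of the same pattern, assuming only Leptonica's pixRead()/pixDestroy(); the error strings follow the hunk, the surrounding program is illustrative.

// Standalone sketch of the existence-check-then-decode pattern from the hunk above.
#include <cstdio>
#include <cstdlib>
#include "allheaders.h"  // Leptonica: Pix, pixRead, pixDestroy

int main(int argc, char** argv) {
  if (argc < 2) {
    fprintf(stderr, "usage: %s imagefile\n", argv[0]);
    return EXIT_FAILURE;
  }
  const char* image = argv[1];

  // 1. The file must at least be openable; fail early with a clear message.
  if (FILE* file = fopen(image, "r")) {
    fclose(file);
  } else {
    fprintf(stderr, "Cannot open input file: %s\n", image);
    return EXIT_FAILURE;
  }

  // 2. The file exists but may still not be an image Leptonica can decode.
  Pix* pixs = pixRead(image);
  if (!pixs) {
    fprintf(stderr, "Leptonica can't process input file: %s\n", image);
    return 2;
  }

  pixDestroy(&pixs);
  return EXIT_SUCCESS;
}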


@ -16,7 +16,6 @@ noinst_HEADERS = \
ccstruct.h coutln.h crakedge.h \
debugpixa.h detlinefit.h dppoint.h fontinfo.h \
imagedata.h \
ipoints.h \
linlsq.h matrix.h mod128.h normalis.h \
ocrblock.h ocrpara.h ocrrow.h otsuthr.h \
pageres.h params_training_featdef.h \


@ -31,7 +31,7 @@
#include "environ.h" // for l_uint32
#include "helpers.h" // for UpdateRange, IntCastRounded
#include "host.h" // for NearlyEqual, TRUE
#include "ipoints.h" // for operator+=, ICOORD::rotate
#include "points.h" // for operator+=, ICOORD::rotate
struct Pix;


@ -23,7 +23,6 @@
#include <cstdint> // for int16_t, int32_t
#include "bits16.h" // for BITS16
#include "elst.h" // for ELIST_ITERATOR, ELISTIZEH, ELIST_LINK
#include "ipoints.h" // for operator+=
#include "mod128.h" // for DIR128, DIRBITS
#include "platform.h" // for DLLSYM
#include "points.h" // for ICOORD, FCOORD


@ -1,485 +0,0 @@
/**********************************************************************
* File: ipoints.h (Formerly icoords.h)
* Description: Inline functions for coords.h.
* Author: Ray Smith
* Created: Fri Jun 21 15:14:21 BST 1991
*
* (C) Copyright 1991, Hewlett-Packard Ltd.
** Licensed under the Apache License, Version 2.0 (the "License");
** you may not use this file except in compliance with the License.
** You may obtain a copy of the License at
** http://www.apache.org/licenses/LICENSE-2.0
** Unless required by applicable law or agreed to in writing, software
** distributed under the License is distributed on an "AS IS" BASIS,
** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
** See the License for the specific language governing permissions and
** limitations under the License.
*
**********************************************************************/
#ifndef IPOINTS_H
#define IPOINTS_H
#include <cmath>
#include "points.h" // ICOORD
/**********************************************************************
* operator!
*
* Rotate an ICOORD 90 degrees anticlockwise.
**********************************************************************/
inline ICOORD
operator! ( //rotate 90 deg anti
const ICOORD & src //thing to rotate
) {
ICOORD result; //output
result.xcoord = -src.ycoord;
result.ycoord = src.xcoord;
return result;
}
/**********************************************************************
* operator-
*
* Unary minus of an ICOORD.
**********************************************************************/
inline ICOORD
operator- ( //unary minus
const ICOORD & src //thing to minus
) {
ICOORD result; //output
result.xcoord = -src.xcoord;
result.ycoord = -src.ycoord;
return result;
}
/**********************************************************************
* operator+
*
* Add 2 ICOORDS.
**********************************************************************/
inline ICOORD
operator+ ( //sum vectors
const ICOORD & op1, //operands
const ICOORD & op2) {
ICOORD sum; //result
sum.xcoord = op1.xcoord + op2.xcoord;
sum.ycoord = op1.ycoord + op2.ycoord;
return sum;
}
/**********************************************************************
* operator+=
*
* Add 2 ICOORDS.
**********************************************************************/
inline ICOORD &
operator+= ( //sum vectors
ICOORD & op1, //operands
const ICOORD & op2) {
op1.xcoord += op2.xcoord;
op1.ycoord += op2.ycoord;
return op1;
}
/**********************************************************************
* operator-
*
* Subtract 2 ICOORDS.
**********************************************************************/
inline ICOORD
operator- ( //subtract vectors
const ICOORD & op1, //operands
const ICOORD & op2) {
ICOORD sum; //result
sum.xcoord = op1.xcoord - op2.xcoord;
sum.ycoord = op1.ycoord - op2.ycoord;
return sum;
}
/**********************************************************************
* operator-=
*
* Subtract 2 ICOORDS.
**********************************************************************/
inline ICOORD &
operator-= ( //sum vectors
ICOORD & op1, //operands
const ICOORD & op2) {
op1.xcoord -= op2.xcoord;
op1.ycoord -= op2.ycoord;
return op1;
}
/**********************************************************************
* operator%
*
* Scalar product of 2 ICOORDS.
**********************************************************************/
inline int32_t
operator% ( //scalar product
const ICOORD & op1, //operands
const ICOORD & op2) {
return op1.xcoord * op2.xcoord + op1.ycoord * op2.ycoord;
}
/**********************************************************************
* operator*
*
* Cross product of 2 ICOORDS.
**********************************************************************/
inline int32_t operator *( //cross product
const ICOORD &op1, //operands
const ICOORD &op2) {
return op1.xcoord * op2.ycoord - op1.ycoord * op2.xcoord;
}
/**********************************************************************
* operator*
*
* Scalar multiply of an ICOORD.
**********************************************************************/
inline ICOORD operator *( //scalar multiply
const ICOORD &op1, //operands
int16_t scale) {
ICOORD result; //output
result.xcoord = op1.xcoord * scale;
result.ycoord = op1.ycoord * scale;
return result;
}
inline ICOORD operator *( //scalar multiply
int16_t scale,
const ICOORD &op1 //operands
) {
ICOORD result; //output
result.xcoord = op1.xcoord * scale;
result.ycoord = op1.ycoord * scale;
return result;
}
/**********************************************************************
* operator*=
*
* Scalar multiply of an ICOORD.
**********************************************************************/
inline ICOORD &
operator*= ( //scalar multiply
ICOORD & op1, //operands
int16_t scale) {
op1.xcoord *= scale;
op1.ycoord *= scale;
return op1;
}
/**********************************************************************
* operator/
*
* Scalar divide of an ICOORD.
**********************************************************************/
inline ICOORD
operator/ ( //scalar divide
const ICOORD & op1, //operands
int16_t scale) {
ICOORD result; //output
result.xcoord = op1.xcoord / scale;
result.ycoord = op1.ycoord / scale;
return result;
}
/**********************************************************************
* operator/=
*
* Scalar divide of an ICOORD.
**********************************************************************/
inline ICOORD &
operator/= ( //scalar divide
ICOORD & op1, //operands
int16_t scale) {
op1.xcoord /= scale;
op1.ycoord /= scale;
return op1;
}
/**********************************************************************
* ICOORD::rotate
*
* Rotate an ICOORD by the given (normalized) (cos,sin) vector.
**********************************************************************/
inline void ICOORD::rotate( //rotate by vector
const FCOORD& vec) {
int16_t tmp;
tmp = (int16_t) floor (xcoord * vec.x () - ycoord * vec.y () + 0.5);
ycoord = (int16_t) floor (ycoord * vec.x () + xcoord * vec.y () + 0.5);
xcoord = tmp;
}
/**********************************************************************
* operator!
*
* Rotate an FCOORD 90 degrees anticlockwise.
**********************************************************************/
inline FCOORD
operator! ( //rotate 90 deg anti
const FCOORD & src //thing to rotate
) {
FCOORD result; //output
result.xcoord = -src.ycoord;
result.ycoord = src.xcoord;
return result;
}
/**********************************************************************
* operator-
*
* Unary minus of an FCOORD.
**********************************************************************/
inline FCOORD
operator- ( //unary minus
const FCOORD & src //thing to minus
) {
FCOORD result; //output
result.xcoord = -src.xcoord;
result.ycoord = -src.ycoord;
return result;
}
/**********************************************************************
* operator+
*
* Add 2 FCOORDS.
**********************************************************************/
inline FCOORD
operator+ ( //sum vectors
const FCOORD & op1, //operands
const FCOORD & op2) {
FCOORD sum; //result
sum.xcoord = op1.xcoord + op2.xcoord;
sum.ycoord = op1.ycoord + op2.ycoord;
return sum;
}
/**********************************************************************
* operator+=
*
* Add 2 FCOORDS.
**********************************************************************/
inline FCOORD &
operator+= ( //sum vectors
FCOORD & op1, //operands
const FCOORD & op2) {
op1.xcoord += op2.xcoord;
op1.ycoord += op2.ycoord;
return op1;
}
/**********************************************************************
* operator-
*
* Subtract 2 FCOORDS.
**********************************************************************/
inline FCOORD
operator- ( //subtract vectors
const FCOORD & op1, //operands
const FCOORD & op2) {
FCOORD sum; //result
sum.xcoord = op1.xcoord - op2.xcoord;
sum.ycoord = op1.ycoord - op2.ycoord;
return sum;
}
/**********************************************************************
* operator-=
*
* Subtract 2 FCOORDS.
**********************************************************************/
inline FCOORD &
operator-= ( //sum vectors
FCOORD & op1, //operands
const FCOORD & op2) {
op1.xcoord -= op2.xcoord;
op1.ycoord -= op2.ycoord;
return op1;
}
/**********************************************************************
* operator%
*
* Scalar product of 2 FCOORDS.
**********************************************************************/
inline float
operator% ( //scalar product
const FCOORD & op1, //operands
const FCOORD & op2) {
return op1.xcoord * op2.xcoord + op1.ycoord * op2.ycoord;
}
/**********************************************************************
* operator*
*
* Cross product of 2 FCOORDS.
**********************************************************************/
inline float operator *( //cross product
const FCOORD &op1, //operands
const FCOORD &op2) {
return op1.xcoord * op2.ycoord - op1.ycoord * op2.xcoord;
}
/**********************************************************************
* operator*
*
* Scalar multiply of an FCOORD.
**********************************************************************/
inline FCOORD operator *( //scalar multiply
const FCOORD &op1, //operands
float scale) {
FCOORD result; //output
result.xcoord = op1.xcoord * scale;
result.ycoord = op1.ycoord * scale;
return result;
}
inline FCOORD operator *( //scalar multiply
float scale,
const FCOORD &op1 //operands
) {
FCOORD result; //output
result.xcoord = op1.xcoord * scale;
result.ycoord = op1.ycoord * scale;
return result;
}
/**********************************************************************
* operator*=
*
* Scalar multiply of an FCOORD.
**********************************************************************/
inline FCOORD &
operator*= ( //scalar multiply
FCOORD & op1, //operands
float scale) {
op1.xcoord *= scale;
op1.ycoord *= scale;
return op1;
}
/**********************************************************************
* operator/
*
* Scalar divide of an FCOORD.
**********************************************************************/
inline FCOORD
operator/ ( //scalar divide
const FCOORD & op1, //operands
float scale) {
FCOORD result; //output
if (scale != 0) {
result.xcoord = op1.xcoord / scale;
result.ycoord = op1.ycoord / scale;
}
return result;
}
/**********************************************************************
* operator/=
*
* Scalar divide of an FCOORD.
**********************************************************************/
inline FCOORD &
operator/= ( //scalar divide
FCOORD & op1, //operands
float scale) {
if (scale != 0) {
op1.xcoord /= scale;
op1.ycoord /= scale;
}
return op1;
}
/**********************************************************************
* rotate
*
* Rotate an FCOORD by the given (normalized) (cos,sin) vector.
**********************************************************************/
inline void FCOORD::rotate( //rotate by vector
const FCOORD vec) {
float tmp;
tmp = xcoord * vec.x () - ycoord * vec.y ();
ycoord = ycoord * vec.x () + xcoord * vec.y ();
xcoord = tmp;
}
inline void FCOORD::unrotate(const FCOORD& vec) {
rotate(FCOORD(vec.x(), -vec.y()));
}
#endif


@ -23,6 +23,7 @@
#include <cmath> // for sqrt, atan2
#include <cstdio>
#include "elst.h"
#include "errcode.h" // for ASSERT_HOST
#include "platform.h" // for DLLSYM
class FCOORD;
@ -317,5 +318,460 @@ class DLLSYM FCOORD
float ycoord;
};
#include "ipoints.h" /*do inline funcs */
/**********************************************************************
* operator!
*
* Rotate an ICOORD 90 degrees anticlockwise.
**********************************************************************/
inline ICOORD
operator! ( //rotate 90 deg anti
const ICOORD & src //thing to rotate
) {
ICOORD result; //output
result.xcoord = -src.ycoord;
result.ycoord = src.xcoord;
return result;
}
/**********************************************************************
* operator-
*
* Unary minus of an ICOORD.
**********************************************************************/
inline ICOORD
operator- ( //unary minus
const ICOORD & src //thing to minus
) {
ICOORD result; //output
result.xcoord = -src.xcoord;
result.ycoord = -src.ycoord;
return result;
}
/**********************************************************************
* operator+
*
* Add 2 ICOORDS.
**********************************************************************/
inline ICOORD
operator+ ( //sum vectors
const ICOORD & op1, //operands
const ICOORD & op2) {
ICOORD sum; //result
sum.xcoord = op1.xcoord + op2.xcoord;
sum.ycoord = op1.ycoord + op2.ycoord;
return sum;
}
/**********************************************************************
* operator+=
*
* Add 2 ICOORDS.
**********************************************************************/
inline ICOORD &
operator+= ( //sum vectors
ICOORD & op1, //operands
const ICOORD & op2) {
op1.xcoord += op2.xcoord;
op1.ycoord += op2.ycoord;
return op1;
}
/**********************************************************************
* operator-
*
* Subtract 2 ICOORDS.
**********************************************************************/
inline ICOORD
operator- ( //subtract vectors
const ICOORD & op1, //operands
const ICOORD & op2) {
ICOORD sum; //result
sum.xcoord = op1.xcoord - op2.xcoord;
sum.ycoord = op1.ycoord - op2.ycoord;
return sum;
}
/**********************************************************************
* operator-=
*
* Subtract 2 ICOORDS.
**********************************************************************/
inline ICOORD &
operator-= ( //subtract vectors
ICOORD & op1, //operands
const ICOORD & op2) {
op1.xcoord -= op2.xcoord;
op1.ycoord -= op2.ycoord;
return op1;
}
/**********************************************************************
* operator%
*
* Scalar product of 2 ICOORDS.
**********************************************************************/
inline int32_t
operator% ( //scalar product
const ICOORD & op1, //operands
const ICOORD & op2) {
return op1.xcoord * op2.xcoord + op1.ycoord * op2.ycoord;
}
/**********************************************************************
* operator*
*
* Cross product of 2 ICOORDS.
**********************************************************************/
inline int32_t operator *( //cross product
const ICOORD &op1, //operands
const ICOORD &op2) {
return op1.xcoord * op2.ycoord - op1.ycoord * op2.xcoord;
}
/**********************************************************************
* operator*
*
* Scalar multiply of an ICOORD.
**********************************************************************/
inline ICOORD operator *( //scalar multiply
const ICOORD &op1, //operands
int16_t scale) {
ICOORD result; //output
result.xcoord = op1.xcoord * scale;
result.ycoord = op1.ycoord * scale;
return result;
}
inline ICOORD operator *( //scalar multiply
int16_t scale,
const ICOORD &op1 //operands
) {
ICOORD result; //output
result.xcoord = op1.xcoord * scale;
result.ycoord = op1.ycoord * scale;
return result;
}
/**********************************************************************
* operator*=
*
* Scalar multiply of an ICOORD.
**********************************************************************/
inline ICOORD &
operator*= ( //scalar multiply
ICOORD & op1, //operands
int16_t scale) {
op1.xcoord *= scale;
op1.ycoord *= scale;
return op1;
}
/**********************************************************************
* operator/
*
* Scalar divide of an ICOORD.
**********************************************************************/
inline ICOORD
operator/ ( //scalar divide
const ICOORD & op1, //operands
int16_t scale) {
ICOORD result; //output
result.xcoord = op1.xcoord / scale;
result.ycoord = op1.ycoord / scale;
return result;
}
/**********************************************************************
* operator/=
*
* Scalar divide of an ICOORD.
**********************************************************************/
inline ICOORD &
operator/= ( //scalar divide
ICOORD & op1, //operands
int16_t scale) {
op1.xcoord /= scale;
op1.ycoord /= scale;
return op1;
}
/**********************************************************************
* ICOORD::rotate
*
* Rotate an ICOORD by the given (normalized) (cos,sin) vector.
**********************************************************************/
inline void ICOORD::rotate( //rotate by vector
const FCOORD& vec) {
int16_t tmp;
tmp = (int16_t) floor (xcoord * vec.x () - ycoord * vec.y () + 0.5);
ycoord = (int16_t) floor (ycoord * vec.x () + xcoord * vec.y () + 0.5);
xcoord = tmp;
}
/**********************************************************************
* operator!
*
* Rotate an FCOORD 90 degrees anticlockwise.
**********************************************************************/
inline FCOORD
operator! ( //rotate 90 deg anti
const FCOORD & src //thing to rotate
) {
FCOORD result; //output
result.xcoord = -src.ycoord;
result.ycoord = src.xcoord;
return result;
}
/**********************************************************************
* operator-
*
* Unary minus of an FCOORD.
**********************************************************************/
inline FCOORD
operator- ( //unary minus
const FCOORD & src //thing to minus
) {
FCOORD result; //output
result.xcoord = -src.xcoord;
result.ycoord = -src.ycoord;
return result;
}
/**********************************************************************
* operator+
*
* Add 2 FCOORDS.
**********************************************************************/
inline FCOORD
operator+ ( //sum vectors
const FCOORD & op1, //operands
const FCOORD & op2) {
FCOORD sum; //result
sum.xcoord = op1.xcoord + op2.xcoord;
sum.ycoord = op1.ycoord + op2.ycoord;
return sum;
}
/**********************************************************************
* operator+=
*
* Add 2 FCOORDS.
**********************************************************************/
inline FCOORD &
operator+= ( //sum vectors
FCOORD & op1, //operands
const FCOORD & op2) {
op1.xcoord += op2.xcoord;
op1.ycoord += op2.ycoord;
return op1;
}
/**********************************************************************
* operator-
*
* Subtract 2 FCOORDS.
**********************************************************************/
inline FCOORD
operator- ( //subtract vectors
const FCOORD & op1, //operands
const FCOORD & op2) {
FCOORD sum; //result
sum.xcoord = op1.xcoord - op2.xcoord;
sum.ycoord = op1.ycoord - op2.ycoord;
return sum;
}
/**********************************************************************
* operator-=
*
* Subtract 2 FCOORDS.
**********************************************************************/
inline FCOORD &
operator-= ( //subtract vectors
FCOORD & op1, //operands
const FCOORD & op2) {
op1.xcoord -= op2.xcoord;
op1.ycoord -= op2.ycoord;
return op1;
}
/**********************************************************************
* operator%
*
* Scalar product of 2 FCOORDS.
**********************************************************************/
inline float
operator% ( //scalar product
const FCOORD & op1, //operands
const FCOORD & op2) {
return op1.xcoord * op2.xcoord + op1.ycoord * op2.ycoord;
}
/**********************************************************************
* operator*
*
* Cross product of 2 FCOORDS.
**********************************************************************/
inline float operator *( //cross product
const FCOORD &op1, //operands
const FCOORD &op2) {
return op1.xcoord * op2.ycoord - op1.ycoord * op2.xcoord;
}
/**********************************************************************
* operator*
*
* Scalar multiply of an FCOORD.
**********************************************************************/
inline FCOORD operator *( //scalar multiply
const FCOORD &op1, //operands
float scale) {
FCOORD result; //output
result.xcoord = op1.xcoord * scale;
result.ycoord = op1.ycoord * scale;
return result;
}
inline FCOORD operator *( //scalar multiply
float scale,
const FCOORD &op1 //operands
) {
FCOORD result; //output
result.xcoord = op1.xcoord * scale;
result.ycoord = op1.ycoord * scale;
return result;
}
/**********************************************************************
* operator*=
*
* Scalar multiply of an FCOORD.
**********************************************************************/
inline FCOORD &
operator*= ( //scalar multiply
FCOORD & op1, //operands
float scale) {
op1.xcoord *= scale;
op1.ycoord *= scale;
return op1;
}
/**********************************************************************
* operator/
*
* Scalar divide of an FCOORD.
**********************************************************************/
inline FCOORD
operator/ ( //scalar divide
const FCOORD & op1, //operands
float scale) {
FCOORD result; //output
ASSERT_HOST(scale != 0.0f);
result.xcoord = op1.xcoord / scale;
result.ycoord = op1.ycoord / scale;
return result;
}
/**********************************************************************
* operator/=
*
* Scalar divide of an FCOORD.
**********************************************************************/
inline FCOORD &
operator/= ( //scalar divide
FCOORD & op1, //operands
float scale) {
ASSERT_HOST(scale != 0.0f);
op1.xcoord /= scale;
op1.ycoord /= scale;
return op1;
}
/**********************************************************************
* rotate
*
* Rotate an FCOORD by the given (normalized) (cos,sin) vector.
**********************************************************************/
inline void FCOORD::rotate( //rotate by vector
const FCOORD vec) {
float tmp;
tmp = xcoord * vec.x () - ycoord * vec.y ();
ycoord = ycoord * vec.x () + xcoord * vec.y ();
xcoord = tmp;
}
inline void FCOORD::unrotate(const FCOORD& vec) {
rotate(FCOORD(vec.x(), -vec.y()));
}
#endif


@ -23,7 +23,6 @@
#include "coutln.h" // for C_OUTLINE
#include "errcode.h" // for ASSERT_HOST
#include "host.h" // for FALSE, TRUE
#include "ipoints.h" // for operator+=, operator*=
#include "mod128.h" // for DIR128
#include "params.h" // for BoolParam, BOOL_VAR
#include "points.h" // for ICOORD


@ -24,7 +24,6 @@
#include <cmath> // for ceil, floor
#include <cstdint> // for INT16_MAX
#include <cstdio> // for FILE
#include "ipoints.h" // for operator+=, operator-=, ICOORD::rotate
#include "platform.h" // for DLLSYM
#include "points.h" // for ICOORD, FCOORD
#include "scrollview.h" // for ScrollView, ScrollView::Color


@ -593,7 +593,7 @@ void STATS::plot(ScrollView* window, // to draw in
window->Pen(colour);
for (int index = 0; index < rangemax_ - rangemin_; index++) {
window->Rectangle( xorigin + xscale * index, yorigin,
window->Rectangle(xorigin + xscale * index, yorigin,
xorigin + xscale * (index + 1),
yorigin + yscale * buckets_[index]);
}


@ -75,7 +75,7 @@ class STATS {
// between 6 and 13 = 9.5
double median() const; // get median of samples
// Returns the count of the given value.
int32_t pile_count(int32_t value ) const {
int32_t pile_count(int32_t value) const {
if (value <= rangemin_)
return buckets_[0];
if (value >= rangemax_ - 1)


@ -26,7 +26,7 @@
#include "allheaders.h" // for pixCreate, pixGetDepth
#include "genericvector.h" // for GenericVector
#include "host.h" // for TRUE, FALSE
#include "ipoints.h" // for operator+=
#include "points.h" // for operator+=, FCOORD, ICOORD
class DENORM;


@ -782,10 +782,10 @@ inline void CLIST_ITERATOR::add_to_end( // element to add
Replace <parm> with "<parm>". <parm> may be an arbitrary number of tokens
***********************************************************************/
#define QUOTE_IT( parm ) #parm
#define QUOTE_IT(parm) #parm
/***********************************************************************
CLISTIZE( CLASSNAME ) MACRO DEFINITION
CLISTIZE(CLASSNAME) MACRO DEFINITION
======================================
CLASSNAME is assumed to be the name of a class to be used in a CONS list
@ -810,7 +810,7 @@ The ...IZE macros define the code use in .c files
***********************************************************************/
/***********************************************************************
CLISTIZEH( CLASSNAME ) MACRO
CLISTIZEH(CLASSNAME) MACRO
CLISTIZEH is a concatenation of 3 fragments CLISTIZEH_A, CLISTIZEH_B and
CLISTIZEH_C.
@ -907,7 +907,7 @@ CLISTIZEH_C.
CLISTIZEH_C(CLASSNAME)
/***********************************************************************
CLISTIZE( CLASSNAME ) MACRO
CLISTIZE(CLASSNAME) MACRO
***********************************************************************/
#define CLISTIZE(CLASSNAME) \


@ -826,10 +826,10 @@ inline void ELIST_ITERATOR::add_to_end( // element to add
Replace <parm> with "<parm>". <parm> may be an arbitrary number of tokens
***********************************************************************/
#define QUOTE_IT( parm ) #parm
#define QUOTE_IT(parm) #parm
/***********************************************************************
ELISTIZE( CLASSNAME ) MACRO
ELISTIZE(CLASSNAME) MACRO
============================
CLASSNAME is assumed to be the name of a class which has a baseclass of
@ -852,7 +852,7 @@ The ...IZE macros define the code use in .c files
***********************************************************************/
/***********************************************************************
ELISTIZEH( CLASSNAME ) MACRO
ELISTIZEH(CLASSNAME) MACRO
ELISTIZEH is a concatenation of 3 fragments ELISTIZEH_A, ELISTIZEH_B and
ELISTIZEH_C.
@ -893,10 +893,10 @@ private: \
DONT_CONSTRUCT_LIST_BY_COPY.error(QUOTE_IT(CLASSNAME##_LIST), ABORT, nullptr);\
} \
void operator=(const CLASSNAME##_LIST&) { \
DONT_ASSIGN_LISTS.error(QUOTE_IT(CLASSNAME##_LIST), ABORT, nullptr ); \
DONT_ASSIGN_LISTS.error(QUOTE_IT(CLASSNAME##_LIST), ABORT, nullptr); \
} \
#define ELISTIZEH_C( CLASSNAME ) \
#define ELISTIZEH_C(CLASSNAME) \
}; \
\
\
@ -945,17 +945,17 @@ class DLLSYM CLASSNAME##_IT : public ELIST_ITERATOR { \
} \
};
#define ELISTIZEH( CLASSNAME ) \
#define ELISTIZEH(CLASSNAME) \
\
ELISTIZEH_A( CLASSNAME ) \
ELISTIZEH_A(CLASSNAME) \
\
ELISTIZEH_B( CLASSNAME ) \
ELISTIZEH_B(CLASSNAME) \
\
ELISTIZEH_C( CLASSNAME )
ELISTIZEH_C(CLASSNAME)
/***********************************************************************
ELISTIZE( CLASSNAME ) MACRO
ELISTIZE(CLASSNAME) MACRO
***********************************************************************/
#define ELISTIZE(CLASSNAME) \


@ -102,7 +102,7 @@ int32_t ELIST2::length() const { // count elements
*
* Sort elements on list
* NB If you don't like the const declarations in the comparator, coerce yours:
* ( int (*)(const void *, const void *)
* (int (*)(const void *, const void *)
**********************************************************************/
void


@ -616,7 +616,7 @@ inline ELIST2_LINK *ELIST2_ITERATOR::extract() {
}
}
// Always set ex_current_was_cycle_pt so an add/forward will work in a loop.
ex_current_was_cycle_pt = (current == cycle_pt) ? true : false;
ex_current_was_cycle_pt = (current == cycle_pt);
extracted_link = current;
extracted_link->next = nullptr; //for safety
extracted_link->prev = nullptr; //for safety
@ -824,10 +824,10 @@ inline void ELIST2_ITERATOR::add_to_end( // element to add
Replace <parm> with "<parm>". <parm> may be an arbitrary number of tokens
***********************************************************************/
#define QUOTE_IT( parm ) #parm
#define QUOTE_IT(parm) #parm
/***********************************************************************
ELIST2IZE( CLASSNAME ) MACRO DEFINITION
ELIST2IZE(CLASSNAME) MACRO DEFINITION
======================================
CLASSNAME is assumed to be the name of a class which has a baseclass of
@ -851,7 +851,7 @@ The ...IZE macros define the code use in .c files
***********************************************************************/
/***********************************************************************
ELIST2IZEH( CLASSNAME ) MACRO
ELIST2IZEH(CLASSNAME) MACRO
ELIST2IZEH is a concatenation of 3 fragments ELIST2IZEH_A, ELIST2IZEH_B and
ELIST2IZEH_C.
@ -955,7 +955,7 @@ ELIST2IZEH_C.
ELIST2IZEH_C(CLASSNAME)
/***********************************************************************
ELIST2IZE( CLASSNAME ) MACRO
ELIST2IZE(CLASSNAME) MACRO
***********************************************************************/
#define ELIST2IZE(CLASSNAME) \


@ -661,11 +661,16 @@ class GenericVectorEqEq : public GenericVector<T> {
template <typename T>
void GenericVector<T>::init(int size) {
size_used_ = 0;
size_reserved_ = 0;
data_ = nullptr;
if (size <= 0) {
data_ = nullptr;
size_reserved_ = 0;
} else {
if (size < kDefaultVectorSize) size = kDefaultVectorSize;
data_ = new T[size];
size_reserved_ = size;
}
clear_cb_ = nullptr;
compare_cb_ = nullptr;
reserve(size);
}
template <typename T>


@ -142,7 +142,7 @@ class ETEXT_DESC { // output header
err_code(0),
cancel(nullptr),
progress_callback(nullptr),
progress_callback2( &default_progress_func ),
progress_callback2(&default_progress_func),
cancel_this(nullptr) {
end_time.tv_sec = 0;
end_time.tv_usec = 0;
@ -173,7 +173,7 @@ private:
static bool default_progress_func(ETEXT_DESC* ths, int left, int right, int top,
int bottom)
{
if ( ths->progress_callback ) {
if (ths->progress_callback) {
return (*(ths->progress_callback))(ths->progress, left, right, top, bottom);
}
return true;


@ -360,7 +360,7 @@ static int tvfscanf(FILE* stream, const char *format, va_list ap) {
scan_int:
q = SkipSpace(stream);
if ( q <= 0 ) {
if (q <= 0) {
bail = BAIL_EOF;
break;
}
@ -471,7 +471,7 @@ static int tvfscanf(FILE* stream, const char *format, va_list ap) {
break;
case '%': // %% sequence
if (fgetc(stream) != '%' )
if (fgetc(stream) != '%')
bail = BAIL_ERR;
break;


@ -34,7 +34,7 @@ class STRING;
Replace <parm> with "<parm>". <parm> may be an arbitrary number of tokens
***********************************************************************/
#define QUOTE_IT( parm ) #parm
#define QUOTE_IT(parm) #parm
namespace tesseract {

File diff suppressed because it is too large


@ -204,7 +204,7 @@ typedef double (*SOLVEFUNC) (CHISTRUCT *, double);
#define Odd(N) ((N)%2)
#define Mirror(N,R) ((R) - (N) - 1)
#define Abs(N) ( ( (N) < 0 ) ? ( -(N) ) : (N) )
#define Abs(N) (((N) < 0) ? (-(N)) : (N))
//--------------Global Data Definitions and Declarations----------------------
/** the following variables describe a discrete normal distribution
@ -477,7 +477,7 @@ SAMPLE* MakeSample(CLUSTERER * Clusterer, const float* Feature,
Clusterer->NumChar = CharID + 1;
// execute hook for monitoring clustering operation
// (*SampleCreationHook)( Sample );
// (*SampleCreationHook)(Sample);
return (Sample);
} // MakeSample
@ -2233,7 +2233,7 @@ CHISTRUCT *NewChiStruct(uint16_t DegreesOfFreedom, double Alpha) {
/**
* This routine attempts to find an x value at which Function
* goes to zero (i.e. a root of the function ). It will only
* goes to zero (i.e. a root of the function). It will only
* work correctly if a solution actually exists and there
* are no extrema between the solution and the InitialGuess.
* The algorithms used are extremely primitive.
@ -2242,7 +2242,7 @@ CHISTRUCT *NewChiStruct(uint16_t DegreesOfFreedom, double Alpha) {
* @param FunctionParams arbitrary data to pass to function
* @param InitialGuess point to start solution search at
* @param Accuracy maximum allowed error
* @return Solution of function ( x for which f(x) = 0 ).
* @return Solution of function (x for which f(x) = 0).
*/
double
Solve (SOLVEFUNC Function,
@ -2296,8 +2296,8 @@ void *FunctionParams, double InitialGuess, double Accuracy)
* from 0 to x, minus the desired area under the curve. The
* number of degrees of freedom of the chi curve is specified
* in the ChiParams structure. The desired area is also
* specified in the ChiParams structure as Alpha ( or 1 minus
* the desired area ). This routine is intended to be passed
* specified in the ChiParams structure as Alpha (or 1 minus
* the desired area). This routine is intended to be passed
* to the Solve() function to find the value of chi-squared
* which will yield a desired area under the right tail of
* the chi density curve. The function will only work for


@ -354,7 +354,7 @@ void WriteProtoList(FILE* File, uint16_t N, PARAM_DESC* ParamDesc,
/* write prototypes */
iterate(ProtoList)
{
Proto = (PROTOTYPE *) first_node ( ProtoList );
Proto = (PROTOTYPE *) first_node (ProtoList);
if ((Proto->Significant && WriteSigProtos) ||
(!Proto->Significant && WriteInsigProtos))
WritePrototype(File, N, Proto);


@ -28,7 +28,7 @@
#include <cmath>
#define Magnitude(X) ((X) < 0 ? -(X) : (X))
#define NodeFound(N,K,D) (( (N)->Key == (K) ) && ( (N)->Data == (D) ))
#define NodeFound(N,K,D) (((N)->Key == (K)) && ((N)->Data == (D)))
/*-----------------------------------------------------------------------------
Global Data Definitions and Declarations


@ -45,7 +45,7 @@ typedef float *MICROFEATURE;
----------------------------------------------------------------------------**/
/* macros for accessing micro-feature lists */
#define NextFeatureOf(L) ( (MICROFEATURE) first_node ( L ) )
#define NextFeatureOf(L) ((MICROFEATURE)first_node(L))
/**----------------------------------------------------------------------------
Public Function Prototypes


@ -253,8 +253,7 @@ void Dict::append_choices(
WERD_CHOICE *best_choice,
int *attempts_left,
void *more_args) {
int word_ending =
(char_choice_index == char_choices.length() - 1) ? true : false;
int word_ending = (char_choice_index == char_choices.length() - 1);
// Deal with fragments.
CHAR_FRAGMENT_INFO char_frag_info;


@ -249,7 +249,7 @@ KERNEL(
else
lastword = *(sword + row*wpl + eiter);
for ( i = 1; i < nwords; i++)
for (i = 1; i < nwords; i++)
{
//Gets LHS words
if ((siter + i) < 0)
@ -604,7 +604,7 @@ KERNEL(
lastword = *(sword + row*wpl + eiter);
for ( i = 1; i < nwords; i++)
for (i = 1; i < nwords; i++)
{
//Gets LHS words
if ((siter + i) < 0)
@ -809,17 +809,17 @@ void kernel_HistogramRectAllChannels(
int threadOffset = get_global_id(0)%HIST_REDUNDANCY;
// for each pixel/channel, accumulate in global memory
for ( uint pc = get_global_id(0); pc < numPixels*NUM_CHANNELS/HR_UNROLL_SIZE; pc += get_global_size(0) ) {
for (uint pc = get_global_id(0); pc < numPixels*NUM_CHANNELS/HR_UNROLL_SIZE; pc += get_global_size(0)) {
pixels = data[pc];
// channel bin thread
atomic_inc( &histBuffer[ 0*HIST_SIZE*HIST_REDUNDANCY + pixels.s0*HIST_REDUNDANCY + threadOffset ]); // ch0
atomic_inc( &histBuffer[ 0*HIST_SIZE*HIST_REDUNDANCY + pixels.s4*HIST_REDUNDANCY + threadOffset ]); // ch0
atomic_inc( &histBuffer[ 1*HIST_SIZE*HIST_REDUNDANCY + pixels.s1*HIST_REDUNDANCY + threadOffset ]); // ch1
atomic_inc( &histBuffer[ 1*HIST_SIZE*HIST_REDUNDANCY + pixels.s5*HIST_REDUNDANCY + threadOffset ]); // ch1
atomic_inc( &histBuffer[ 2*HIST_SIZE*HIST_REDUNDANCY + pixels.s2*HIST_REDUNDANCY + threadOffset ]); // ch2
atomic_inc( &histBuffer[ 2*HIST_SIZE*HIST_REDUNDANCY + pixels.s6*HIST_REDUNDANCY + threadOffset ]); // ch2
atomic_inc( &histBuffer[ 3*HIST_SIZE*HIST_REDUNDANCY + pixels.s3*HIST_REDUNDANCY + threadOffset ]); // ch3
atomic_inc( &histBuffer[ 3*HIST_SIZE*HIST_REDUNDANCY + pixels.s7*HIST_REDUNDANCY + threadOffset ]); // ch3
atomic_inc(&histBuffer[0*HIST_SIZE*HIST_REDUNDANCY + pixels.s0*HIST_REDUNDANCY + threadOffset]); // ch0
atomic_inc(&histBuffer[0*HIST_SIZE*HIST_REDUNDANCY + pixels.s4*HIST_REDUNDANCY + threadOffset]); // ch0
atomic_inc(&histBuffer[1*HIST_SIZE*HIST_REDUNDANCY + pixels.s1*HIST_REDUNDANCY + threadOffset]); // ch1
atomic_inc(&histBuffer[1*HIST_SIZE*HIST_REDUNDANCY + pixels.s5*HIST_REDUNDANCY + threadOffset]); // ch1
atomic_inc(&histBuffer[2*HIST_SIZE*HIST_REDUNDANCY + pixels.s2*HIST_REDUNDANCY + threadOffset]); // ch2
atomic_inc(&histBuffer[2*HIST_SIZE*HIST_REDUNDANCY + pixels.s6*HIST_REDUNDANCY + threadOffset]); // ch2
atomic_inc(&histBuffer[3*HIST_SIZE*HIST_REDUNDANCY + pixels.s3*HIST_REDUNDANCY + threadOffset]); // ch3
atomic_inc(&histBuffer[3*HIST_SIZE*HIST_REDUNDANCY + pixels.s7*HIST_REDUNDANCY + threadOffset]); // ch3
}
}
)
@ -838,17 +838,17 @@ void kernel_HistogramRectOneChannel(
int threadOffset = get_global_id(0)%HIST_REDUNDANCY;
// for each pixel/channel, accumulate in global memory
for ( uint pc = get_global_id(0); pc < numPixels/HR_UNROLL_SIZE; pc += get_global_size(0) ) {
for (uint pc = get_global_id(0); pc < numPixels/HR_UNROLL_SIZE; pc += get_global_size(0)) {
pixels = data[pc];
// bin thread
atomic_inc( &histBuffer[ pixels.s0*HIST_REDUNDANCY + threadOffset ]);
atomic_inc( &histBuffer[ pixels.s1*HIST_REDUNDANCY + threadOffset ]);
atomic_inc( &histBuffer[ pixels.s2*HIST_REDUNDANCY + threadOffset ]);
atomic_inc( &histBuffer[ pixels.s3*HIST_REDUNDANCY + threadOffset ]);
atomic_inc( &histBuffer[ pixels.s4*HIST_REDUNDANCY + threadOffset ]);
atomic_inc( &histBuffer[ pixels.s5*HIST_REDUNDANCY + threadOffset ]);
atomic_inc( &histBuffer[ pixels.s6*HIST_REDUNDANCY + threadOffset ]);
atomic_inc( &histBuffer[ pixels.s7*HIST_REDUNDANCY + threadOffset ]);
atomic_inc(&histBuffer[pixels.s0*HIST_REDUNDANCY + threadOffset]);
atomic_inc(&histBuffer[pixels.s1*HIST_REDUNDANCY + threadOffset]);
atomic_inc(&histBuffer[pixels.s2*HIST_REDUNDANCY + threadOffset]);
atomic_inc(&histBuffer[pixels.s3*HIST_REDUNDANCY + threadOffset]);
atomic_inc(&histBuffer[pixels.s4*HIST_REDUNDANCY + threadOffset]);
atomic_inc(&histBuffer[pixels.s5*HIST_REDUNDANCY + threadOffset]);
atomic_inc(&histBuffer[pixels.s6*HIST_REDUNDANCY + threadOffset]);
atomic_inc(&histBuffer[pixels.s7*HIST_REDUNDANCY + threadOffset]);
}
}
)
@ -870,7 +870,7 @@ void kernel_HistogramRectAllChannelsReduction(
int value = 0;
// accumulate in register
for ( uint i = get_local_id(0); i < HIST_REDUNDANCY; i+=GROUP_SIZE) {
for (uint i = get_local_id(0); i < HIST_REDUNDANCY; i+=GROUP_SIZE) {
value += histBuffer[ channel*HIST_SIZE*HIST_REDUNDANCY+bin*HIST_REDUNDANCY+i];
}
@ -912,7 +912,7 @@ void kernel_HistogramRectOneChannelReduction(
int value = 0;
// accumulate in register
for ( int i = get_local_id(0); i < HIST_REDUNDANCY; i+=GROUP_SIZE) {
for (int i = get_local_id(0); i < HIST_REDUNDANCY; i+=GROUP_SIZE) {
value += histBuffer[ bin*HIST_REDUNDANCY+i];
}
@ -966,16 +966,16 @@ void kernel_ThresholdRectToPix(
// declare variables
int pThresholds[NUM_CHANNELS];
int pHi_Values[NUM_CHANNELS];
for ( int i = 0; i < NUM_CHANNELS; i++) {
for (int i = 0; i < NUM_CHANNELS; i++) {
pThresholds[i] = thresholds[i];
pHi_Values[i] = hi_values[i];
}
// for each word (32 pixels) in output image
for ( uint w = get_global_id(0); w < wpl*height; w += get_global_size(0) ) {
for (uint w = get_global_id(0); w < wpl*height; w += get_global_size(0)) {
unsigned int word = 0; // all bits start at zero
// for each burst in word
for ( int b = 0; b < BURSTS_PER_WORD; b++) {
for (int b = 0; b < BURSTS_PER_WORD; b++) {
// load burst
charVec pixels;
int offset = (w / wpl) * width;
@ -986,8 +986,8 @@ void kernel_ThresholdRectToPix(
pixels.v[i] = imageData[offset + i];
// for each pixel in burst
for ( int p = 0; p < PIXELS_PER_BURST; p++) {
for ( int c = 0; c < NUM_CHANNELS; c++) {
for (int p = 0; p < PIXELS_PER_BURST; p++) {
for (int c = 0; c < NUM_CHANNELS; c++) {
unsigned char pixChan = pixels.s[p*NUM_CHANNELS + c];
if (pHi_Values[c] >= 0 && (pixChan > pThresholds[c]) == (pHi_Values[c] == 0)) {
const uint kTopBit = 0x80000000;
@ -1023,17 +1023,17 @@ void kernel_ThresholdRectToPix_OneChan(
// declare variables
int pThresholds[1];
int pHi_Values[1];
for ( int i = 0; i < 1; i++) {
for (int i = 0; i < 1; i++) {
pThresholds[i] = thresholds[i];
pHi_Values[i] = hi_values[i];
}
// for each word (32 pixels) in output image
for ( uint w = get_global_id(0); w < wpl*height; w += get_global_size(0) ) {
for (uint w = get_global_id(0); w < wpl*height; w += get_global_size(0)) {
unsigned int word = 0; // all bits start at zero
// for each burst in word
for ( int b = 0; b < BURSTS_PER_WORD; b++) {
for (int b = 0; b < BURSTS_PER_WORD; b++) {
// load burst
charVec1 pixels;
@ -1044,7 +1044,7 @@ void kernel_ThresholdRectToPix_OneChan(
+ 0 ];
// for each pixel in burst
for ( int p = 0; p < PIXELS_PER_BURST; p++) {
for (int p = 0; p < PIXELS_PER_BURST; p++) {
//int littleEndianIdx = p ^ 3;
//int bigEndianIdx = p;


@ -553,7 +553,7 @@ static ds_status writeProfileToFile(ds_profile* profile,
// substitute invalid characters in device name with _
static void legalizeFileName(char* fileName) {
// printf("fileName: %s\n", fileName);
// tprintf("fileName: %s\n", fileName);
const char* invalidChars =
"/\?:*\"><| "; // space is valid but can cause headaches
// for each invalid char
@ -561,22 +561,22 @@ static void legalizeFileName(char* fileName) {
char invalidStr[4];
invalidStr[0] = invalidChars[i];
invalidStr[1] = '\0';
// printf("eliminating %s\n", invalidStr);
// tprintf("eliminating %s\n", invalidStr);
// char *pos = strstr(fileName, invalidStr);
// initial ./ is valid for present directory
// if (*pos == '.') pos++;
// if (*pos == '/') pos++;
for (char* pos = strstr(fileName, invalidStr); pos != nullptr;
pos = strstr(pos + 1, invalidStr)) {
// printf("\tfound: %s, ", pos);
// tprintf("\tfound: %s, ", pos);
pos[0] = '_';
// printf("fileName: %s\n", fileName);
// tprintf("fileName: %s\n", fileName);
}
}
}
static void populateGPUEnvFromDevice(GPUEnv* gpuInfo, cl_device_id device) {
// printf("[DS] populateGPUEnvFromDevice\n");
// tprintf("[DS] populateGPUEnvFromDevice\n");
size_t size;
gpuInfo->mnIsUserCreated = 1;
// device
@ -713,7 +713,7 @@ int OpenclDevice::initMorphCLAllocations(l_int32 wpl, l_int32 h, Pix* pixs) {
int OpenclDevice::InitEnv() {
// PERF_COUNT_START("OD::InitEnv")
// printf("[OD] OpenclDevice::InitEnv()\n");
// tprintf("[OD] OpenclDevice::InitEnv()\n");
#ifdef SAL_WIN32
while (1) {
if (1 == LoadOpencl()) break;
@ -763,7 +763,7 @@ int OpenclDevice::InitOpenclRunEnv_DeviceSelection(int argc) {
cl_device_id bestDevice = bestDevice_DS.oclDeviceID;
// overwrite global static GPUEnv with new device
if (selectedDeviceIsOpenCL()) {
// printf("[DS] InitOpenclRunEnv_DS::Calling populateGPUEnvFromDevice()
// tprintf("[DS] InitOpenclRunEnv_DS::Calling populateGPUEnvFromDevice()
// for selected device\n");
populateGPUEnvFromDevice(&gpuEnv, bestDevice);
gpuEnv.mnFileCount = 0; // argc;
@ -772,7 +772,7 @@ int OpenclDevice::InitOpenclRunEnv_DeviceSelection(int argc) {
CompileKernelFile(&gpuEnv, "");
// PERF_COUNT_SUB("CompileKernelFile")
} else {
// printf("[DS] InitOpenclRunEnv_DS::Skipping populateGPUEnvFromDevice()
// tprintf("[DS] InitOpenclRunEnv_DS::Skipping populateGPUEnvFromDevice()
// b/c native cpu selected\n");
}
isInited = 1;
@ -925,10 +925,10 @@ int OpenclDevice::GeneratBinFromKernelSource(cl_program program,
sprintf(fileName, "%s-%s.bin", cl_name, deviceName);
legalizeFileName(fileName);
if (!WriteBinaryToFile(fileName, binaries[i], binarySizes[i])) {
printf("[OD] write binary[%s] failed\n", fileName);
tprintf("[OD] write binary[%s] failed\n", fileName);
return 0;
} // else
printf("[OD] write binary[%s] successfully\n", fileName);
tprintf("[OD] write binary[%s] successfully\n", fileName);
}
}
@ -1017,7 +1017,7 @@ int OpenclDevice::CompileKernelFile(GPUEnv* gpuInfo, const char* buildOption) {
// char options[512];
// create a cl program executable for all the devices specified
// printf("[OD] BuildProgram.\n");
// tprintf("[OD] BuildProgram.\n");
PERF_COUNT_START("OD::CompileKernel::clBuildProgram")
if (!gpuInfo->mnIsUserCreated) {
clStatus =
@ -1032,7 +1032,7 @@ int OpenclDevice::CompileKernelFile(GPUEnv* gpuInfo, const char* buildOption) {
}
PERF_COUNT_END
if (clStatus != CL_SUCCESS) {
printf("BuildProgram error!\n");
tprintf("BuildProgram error!\n");
size_t length;
if (!gpuInfo->mnIsUserCreated) {
clStatus = clGetProgramBuildInfo(
@ -1044,7 +1044,7 @@ int OpenclDevice::CompileKernelFile(GPUEnv* gpuInfo, const char* buildOption) {
CL_PROGRAM_BUILD_LOG, 0, nullptr, &length);
}
if (clStatus != CL_SUCCESS) {
printf("opencl create build log fail\n");
tprintf("opencl create build log fail\n");
return 0;
}
std::vector<char> buildLog(length);
@ -1058,7 +1058,7 @@ int OpenclDevice::CompileKernelFile(GPUEnv* gpuInfo, const char* buildOption) {
length, &buildLog[0], &length);
}
if (clStatus != CL_SUCCESS) {
printf("opencl program build info fail\n");
tprintf("opencl program build info fail\n");
return 0;
}
@ -1890,7 +1890,7 @@ int OpenclDevice::ThresholdRectToPixOCL(unsigned char* imageData,
clFinish(rEnv.mpkCmdQueue);
PERF_COUNT_SUB("kernel")
if (clStatus != 0) {
printf("Setting return value to -1\n");
tprintf("Setting return value to -1\n");
retVal = -1;
}
/* map results back from gpu */
@ -1954,7 +1954,7 @@ static void populateTessScoreEvaluationInputData(
for (int i = 0; i < numLines; i++) {
int lineWidth = rand() % maxLineWidth;
int vertLinePos = lineWidth + rand() % (width - 2 * lineWidth);
// printf("[PI] VerticalLine @ %i (w=%i)\n", vertLinePos, lineWidth);
// tprintf("[PI] VerticalLine @ %i (w=%i)\n", vertLinePos, lineWidth);
for (int row = vertLinePos - lineWidth / 2;
row < vertLinePos + lineWidth / 2; row++) {
for (int col = 0; col < height; col++) {
@ -1970,13 +1970,13 @@ static void populateTessScoreEvaluationInputData(
for (int i = 0; i < numLines; i++) {
int lineWidth = rand() % maxLineWidth;
int horLinePos = lineWidth + rand() % (height - 2 * lineWidth);
// printf("[PI] HorizontalLine @ %i (w=%i)\n", horLinePos, lineWidth);
// tprintf("[PI] HorizontalLine @ %i (w=%i)\n", horLinePos, lineWidth);
for (int row = 0; row < width; row++) {
for (int col = horLinePos - lineWidth / 2;
col < horLinePos + lineWidth / 2;
col++) { // for (int row = vertLinePos-lineWidth/2; row <
// vertLinePos+lineWidth/2; row++) {
// printf("[PI] HoizLine pix @ (%3i, %3i)\n", row, col);
// tprintf("[PI] HoizLine pix @ (%3i, %3i)\n", row, col);
// imageData4[row*width+col] = pixelBlack;
imageData4[row * width + col][0] = pixelBlack[0];
imageData4[row * width + col][1] = pixelBlack[1];
@ -1993,10 +1993,10 @@ static void populateTessScoreEvaluationInputData(
int lineWidth = rand() % maxLineWidth;
int col = lineWidth + rand() % (width - 2 * lineWidth);
int row = lineWidth + rand() % (height - 2 * lineWidth);
// printf("[PI] Spot[%i/%i] @ (%3i, %3i)\n", i, numSpots, row, col );
// tprintf("[PI] Spot[%i/%i] @ (%3i, %3i)\n", i, numSpots, row, col );
for (int r = row - lineWidth / 2; r < row + lineWidth / 2; r++) {
for (int c = col - lineWidth / 2; c < col + lineWidth / 2; c++) {
// printf("[PI] \tSpot[%i/%i] @ (%3i, %3i)\n", i, numSpots, r, c );
// tprintf("[PI] \tSpot[%i/%i] @ (%3i, %3i)\n", i, numSpots, r, c );
// imageData4[row*width+col] = pixelBlack;
imageData4[r * width + c][0] = pixelBlack[0];
imageData4[r * width + c][1] = pixelBlack[1];
@ -2444,17 +2444,17 @@ static ds_status releaseScore(TessDeviceScore* score) {
static ds_status evaluateScoreForDevice(ds_device* device, void* inputData) {
// overwrite static gpuEnv w/ current device
// so native opencl calls can be used; they use static gpuEnv
printf("\n[DS] Device: \"%s\" (%s) evaluation...\n", device->oclDeviceName,
tprintf("\n[DS] Device: \"%s\" (%s) evaluation...\n", device->oclDeviceName,
device->type == DS_DEVICE_OPENCL_DEVICE ? "OpenCL" : "Native");
GPUEnv* env = nullptr;
if (device->type == DS_DEVICE_OPENCL_DEVICE) {
env = &OpenclDevice::gpuEnv;
memset(env, 0, sizeof(*env));
// printf("[DS] populating tmp GPUEnv from device\n");
// tprintf("[DS] populating tmp GPUEnv from device\n");
populateGPUEnvFromDevice(env, device->oclDeviceID);
env->mnFileCount = 0; // argc;
env->mnKernelCount = 0UL;
// printf("[DS] compiling kernels for tmp GPUEnv\n");
// tprintf("[DS] compiling kernels for tmp GPUEnv\n");
OpenclDevice::CompileKernelFile(env, "");
}
@ -2490,17 +2490,17 @@ static ds_status evaluateScoreForDevice(ds_device* device, void* inputData) {
device->score = new TessDeviceScore;
device->score->time = weightedTime;
printf("[DS] Device: \"%s\" (%s) evaluated\n", device->oclDeviceName,
tprintf("[DS] Device: \"%s\" (%s) evaluated\n", device->oclDeviceName,
device->type == DS_DEVICE_OPENCL_DEVICE ? "OpenCL" : "Native");
printf("[DS]%25s: %f (w=%.1f)\n", "composeRGBPixel", composeRGBPixelTime,
tprintf("[DS]%25s: %f (w=%.1f)\n", "composeRGBPixel", composeRGBPixelTime,
composeRGBPixelWeight);
printf("[DS]%25s: %f (w=%.1f)\n", "HistogramRect", histogramRectTime,
tprintf("[DS]%25s: %f (w=%.1f)\n", "HistogramRect", histogramRectTime,
histogramRectWeight);
printf("[DS]%25s: %f (w=%.1f)\n", "ThresholdRectToPix",
tprintf("[DS]%25s: %f (w=%.1f)\n", "ThresholdRectToPix",
thresholdRectToPixTime, thresholdRectToPixWeight);
printf("[DS]%25s: %f (w=%.1f)\n", "getLineMasksMorph", getLineMasksMorphTime,
tprintf("[DS]%25s: %f (w=%.1f)\n", "getLineMasksMorph", getLineMasksMorphTime,
getLineMasksMorphWeight);
printf("[DS]%25s: %f\n", "Score", device->score->time);
tprintf("[DS]%25s: %f\n", "Score", device->score->time);
return DS_SUCCESS;
}
@ -2522,7 +2522,7 @@ ds_device OpenclDevice::getDeviceSelection() {
status = readProfileFromFile(profile, deserializeScore, fileName);
if (status != DS_SUCCESS) {
// need to run evaluation
printf("[DS] Profile file not available (%s); performing profiling.\n",
tprintf("[DS] Profile file not available (%s); performing profiling.\n",
fileName);
// create input data
@ -2539,21 +2539,21 @@ ds_device OpenclDevice::getDeviceSelection() {
status = writeProfileToFile(profile, serializeScore, fileName);
PERF_COUNT_SUB("writeProfileToFile")
if (status == DS_SUCCESS) {
printf("[DS] Scores written to file (%s).\n", fileName);
tprintf("[DS] Scores written to file (%s).\n", fileName);
} else {
printf(
tprintf(
"[DS] Error saving scores to file (%s); scores not written to "
"file.\n",
fileName);
}
} else {
printf(
tprintf(
"[DS] Unable to evaluate performance; scores not written to "
"file.\n");
}
} else {
PERF_COUNT_SUB("readProfileFromFile")
printf("[DS] Profile read from file (%s).\n", fileName);
tprintf("[DS] Profile read from file (%s).\n", fileName);
}
// we now have device scores either from file or evaluation
@ -2566,14 +2566,14 @@ ds_device OpenclDevice::getDeviceSelection() {
TessDeviceScore score = *device.score;
float time = score.time;
printf("[DS] Device[%u] %i:%s score is %f\n", d + 1, device.type,
tprintf("[DS] Device[%u] %i:%s score is %f\n", d + 1, device.type,
device.oclDeviceName, time);
if (time < bestTime) {
bestTime = time;
bestDeviceIdx = d;
}
}
printf("[DS] Selected Device[%i]: \"%s\" (%s)\n", bestDeviceIdx + 1,
tprintf("[DS] Selected Device[%i]: \"%s\" (%s)\n", bestDeviceIdx + 1,
profile->devices[bestDeviceIdx].oclDeviceName,
profile->devices[bestDeviceIdx].type == DS_DEVICE_OPENCL_DEVICE
? "OpenCL"
@ -2586,14 +2586,14 @@ ds_device OpenclDevice::getDeviceSelection() {
if (overrideDeviceStr != nullptr) {
int overrideDeviceIdx = atoi(overrideDeviceStr);
if (overrideDeviceIdx > 0 && overrideDeviceIdx <= profile->numDevices) {
printf(
tprintf(
"[DS] Overriding Device Selection (TESSERACT_OPENCL_DEVICE=%s, "
"%i)\n",
overrideDeviceStr, overrideDeviceIdx);
bestDeviceIdx = overrideDeviceIdx - 1;
overridden = true;
} else {
printf(
tprintf(
"[DS] Ignoring invalid TESSERACT_OPENCL_DEVICE=%s ([1,%i] are "
"valid devices).\n",
overrideDeviceStr, profile->numDevices);
@ -2601,7 +2601,7 @@ ds_device OpenclDevice::getDeviceSelection() {
}
if (overridden) {
printf("[DS] Overridden Device[%i]: \"%s\" (%s)\n", bestDeviceIdx + 1,
tprintf("[DS] Overridden Device[%i]: \"%s\" (%s)\n", bestDeviceIdx + 1,
profile->devices[bestDeviceIdx].oclDeviceName,
profile->devices[bestDeviceIdx].type == DS_DEVICE_OPENCL_DEVICE
? "OpenCL"
@ -2612,7 +2612,7 @@ ds_device OpenclDevice::getDeviceSelection() {
releaseDSProfile(profile, releaseScore);
} else {
// opencl isn't available at runtime, select native cpu device
printf("[DS] OpenCL runtime not available.\n");
tprintf("[DS] OpenCL runtime not available.\n");
selectedDevice.type = DS_DEVICE_NATIVE_CPU;
selectedDevice.oclDeviceName = "(null)";
selectedDevice.score = nullptr;
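
The printf-to-tprintf substitutions in this file route the device-selection diagnostics through Tesseract's logging function so that a single quiet option can silence them. A minimal sketch of that pattern, using hypothetical names rather than Tesseract's actual tprintf implementation:

#include <cstdarg>
#include <cstdio>

// Hypothetical quiet flag standing in for Tesseract's debug/quiet setting.
static bool log_quiet = false;

// printf-compatible wrapper: every message passes through one gate, so one
// flag can suppress all diagnostic output.
static void log_printf(const char* format, ...) {
  if (log_quiet) return;
  va_list args;
  va_start(args, format);
  vfprintf(stderr, format, args);
  va_end(args);
}

int main() {
  log_printf("[DS] Device: \"%s\" (%s) evaluation...\n", "example-device", "OpenCL");
  log_quiet = true;
  log_printf("this message is suppressed\n");
  return 0;
}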

View File

@ -95,7 +95,7 @@
QueryPerformanceCounter(&time_funct_end); \
elapsed_time_sec = (time_funct_end.QuadPart - time_funct_start.QuadPart) / \
(double)(freq.QuadPart); \
printf(PERF_COUNT_REPORT_STR, funct_name, "total", elapsed_time_sec);
tprintf(PERF_COUNT_REPORT_STR, funct_name, "total", elapsed_time_sec);
#else
#define PERF_COUNT_START(FUNCT_NAME)
#define PERF_COUNT_END
@ -106,7 +106,7 @@
QueryPerformanceCounter(&time_sub_end); \
elapsed_time_sec = (time_sub_end.QuadPart - time_sub_start.QuadPart) / \
(double)(freq.QuadPart); \
printf(PERF_COUNT_REPORT_STR, funct_name, SUB, elapsed_time_sec); \
tprintf(PERF_COUNT_REPORT_STR, funct_name, SUB, elapsed_time_sec); \
time_sub_start = time_sub_end;
#else
#define PERF_COUNT_SUB(SUB)
@ -129,7 +129,7 @@
elapsed_time_sec = \
(time_funct_end.tv_sec - time_funct_start.tv_sec) * 1.0 + \
(time_funct_end.tv_nsec - time_funct_start.tv_nsec) / 1000000000.0; \
printf(PERF_COUNT_REPORT_STR, funct_name, "total", elapsed_time_sec);
tprintf(PERF_COUNT_REPORT_STR, funct_name, "total", elapsed_time_sec);
#else
#define PERF_COUNT_START(FUNCT_NAME)
#define PERF_COUNT_END
@ -141,7 +141,7 @@
elapsed_time_sec = \
(time_sub_end.tv_sec - time_sub_start.tv_sec) * 1.0 + \
(time_sub_end.tv_nsec - time_sub_start.tv_nsec) / 1000000000.0; \
printf(PERF_COUNT_REPORT_STR, funct_name, SUB, elapsed_time_sec); \
tprintf(PERF_COUNT_REPORT_STR, funct_name, SUB, elapsed_time_sec); \
time_sub_start = time_sub_end;
#else
#define PERF_COUNT_SUB(SUB)
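
The enabled PERF_COUNT_SUB branch above times each sub-step with clock_gettime and then rolls the start point forward. A standalone sketch of the same idea; the clock choice and report format here are illustrative assumptions, not the macro's exact definitions:

#include <cstdio>
#include <ctime>

int main() {
  struct timespec time_sub_start, time_sub_end;
  clock_gettime(CLOCK_MONOTONIC, &time_sub_start);
  // ... the work of one sub-step (e.g. "kernel") would run here ...
  clock_gettime(CLOCK_MONOTONIC, &time_sub_end);
  double elapsed_time_sec =
      (time_sub_end.tv_sec - time_sub_start.tv_sec) * 1.0 +
      (time_sub_end.tv_nsec - time_sub_start.tv_nsec) / 1000000000.0;
  std::printf("[PERF] %s %s %f sec\n", "CompileKernelFile", "kernel", elapsed_time_sec);
  time_sub_start = time_sub_end;  // the next sub-step is measured from here
  return 0;
}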
@ -187,7 +187,7 @@ typedef int (*cl_kernel_function)(void** userdata, KernelEnv* kenv);
#define CHECK_OPENCL(status, name) \
if (status != CL_SUCCESS) { \
printf("OpenCL error code is %d at when %s .\n", status, name); \
tprintf("OpenCL error code is %d at when %s .\n", status, name); \
}
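
A hedged sketch of how a CHECK_OPENCL-style macro is used right after an OpenCL call; the platform query is only an illustrative stand-in, and tprintf is replaced by printf so the sketch is self-contained:

#include <CL/cl.h>
#include <cstdio>

#define CHECK_OPENCL(status, name) \
  if (status != CL_SUCCESS) { \
    std::printf("OpenCL error code is %d at %s.\n", status, name); \
  }

int main() {
  cl_uint num_platforms = 0;
  cl_int clStatus = clGetPlatformIDs(0, nullptr, &num_platforms);
  CHECK_OPENCL(clStatus, "clGetPlatformIDs");
  return 0;
}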
struct GPUEnv {

View File

@ -65,8 +65,8 @@ class REGION_OCC:public ELIST_LINK
};
ELISTIZEH (REGION_OCC)
#define RANGE_IN_BAND( band_max, band_min, range_max, range_min ) \
( ((range_min) >= (band_min)) && ((range_max) < (band_max)) ) ? true : false
#define RANGE_IN_BAND(band_max, band_min, range_max, range_min) \
(((range_min) >= (band_min)) && ((range_max) < (band_max)))
/************************************************************************
Adapted from the following procedure so that it can be used in the bands
class in an include file...
@ -78,14 +78,14 @@ int16_t band_min,
int16_t range_max,
int16_t range_min]
{
if ( (range_min >= band_min) && (range_max < band_max) )
if ((range_min >= band_min) && (range_max < band_max))
return TRUE;
else
return FALSE;
}
***********************************************************************/
#define RANGE_OVERLAPS_BAND( band_max, band_min, range_max, range_min ) \
( ((range_max) >= (band_min)) && ((range_min) < (band_max)) ) ? true : false
#define RANGE_OVERLAPS_BAND(band_max, band_min, range_max, range_min) \
(((range_max) >= (band_min)) && ((range_min) < (band_max)))
/************************************************************************
Adapted from the following procedure so that it can be used in the bands
class in an include file...
@ -97,7 +97,7 @@ int16_t band_min,
int16_t range_max,
int16_t range_min]
{
if ( (range_max >= band_min) && (range_min < band_max) )
if ((range_max >= band_min) && (range_min < band_max))
return TRUE;
else
return FALSE;
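
The rewritten RANGE_IN_BAND and RANGE_OVERLAPS_BAND macros drop the redundant "? true : false" because the comparisons already yield a bool. Restated as plain functions with an illustrative call (values made up for the example):

#include <cstdio>

static bool range_in_band(int band_max, int band_min, int range_max, int range_min) {
  return (range_min >= band_min) && (range_max < band_max);
}

static bool range_overlaps_band(int band_max, int band_min, int range_max, int range_min) {
  return (range_max >= band_min) && (range_min < band_max);
}

int main() {
  // A band covering [10, 20) and two candidate ranges:
  std::printf("fully inside: %d\n", range_in_band(20, 10, 18, 12));          // 1
  std::printf("partial overlap: %d\n", range_overlaps_band(20, 10, 25, 18)); // 1
  return 0;
}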

View File

@ -131,7 +131,7 @@ void ColPartitionSet::ImproveColumnCandidate(WidthCallback* cb,
int col_box_left = col_part->BoxLeftKey();
bool tab_width_ok = cb->Run(part->KeyWidth(col_left, part_right));
bool box_width_ok = cb->Run(part->KeyWidth(col_box_left, part_right));
if (tab_width_ok || (!part_width_ok )) {
if (tab_width_ok || (!part_width_ok)) {
// The tab is leaving the good column metric at least as good as
// it was before, so use the tab.
part->CopyLeftTab(*col_part, false);
@ -152,7 +152,7 @@ void ColPartitionSet::ImproveColumnCandidate(WidthCallback* cb,
int col_box_right = col_part->BoxRightKey();
bool tab_width_ok = cb->Run(part->KeyWidth(part_left, col_right));
bool box_width_ok = cb->Run(part->KeyWidth(part_left, col_box_right));
if (tab_width_ok || (!part_width_ok )) {
if (tab_width_ok || (!part_width_ok)) {
// The tab is leaving the good column metric at least as good as
// it was before, so use the tab.
part->CopyRightTab(*col_part, false);

View File

@ -591,7 +591,7 @@ void LineFinder::GetLineMasks(int resolution, Pix* src_pix,
int clStatus = OpenclDevice::initMorphCLAllocations(pixGetWpl(src_pix),
pixGetHeight(src_pix),
src_pix);
bool getpixclosed = pix_music_mask != nullptr ? true : false;
bool getpixclosed = pix_music_mask != nullptr;
OpenclDevice::pixGetLinesCL(nullptr, src_pix, pix_vline, pix_hline,
&pix_closed, getpixclosed, closing_brick,
closing_brick, max_line_width, max_line_width,

View File

@ -209,7 +209,7 @@ int TabFind::GutterWidth(int bottom_y, int top_y, const TabVector& v,
void TabFind::GutterWidthAndNeighbourGap(int tab_x, int mean_height,
int max_gutter, bool left,
BLOBNBOX* bbox, int* gutter_width,
int* neighbour_gap ) {
int* neighbour_gap) {
const TBOX& box = bbox->bounding_box();
// The gutter and internal sides of the box.
int gutter_x = left ? box.left() : box.right();

View File

@ -808,7 +808,7 @@ int16_t Textord::stats_count_under(STATS *stats, int16_t threshold) {
* (I.e. reasonably large space and kn:sp ratio)
* && > 3/4 # gaps < kn + (sp - kn)/3
* (I.e. most gaps are well away from space estimate)
* && a gap of max( 3, (sp - kn)/3 ) empty histogram positions is found
* && a gap of max(3, (sp - kn) / 3) empty histogram positions is found
* somewhere in the histogram between kn and sp
* THEN set the threshold and fuzzy limits to this gap - ie NO fuzzies
* NO!!!!! the bristol line has "11" with a gap of 12 between the 1's!!!
@ -836,7 +836,7 @@ void Textord::improve_row_threshold(TO_ROW *row, STATS *all_gap_stats) {
tprintf (" 1");
/*
Look for the first region of all 0's in the histogram which is wider than
max( 3, (sp - kn)/3 ) and starts between kn and sp. If found, and current
max(3, (sp - kn) / 3) and starts between kn and sp. If found, and current
threshold is not within it, move the threshold so that it is just inside it.
*/
reqd_zero_width = (int16_t) floor ((sp - kn) / 3 + 0.5);
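
A small worked example of the max(3, (sp - kn) / 3) rule referenced in the comments above, with kn and sp values chosen purely for illustration:

#include <cmath>
#include <cstdint>
#include <cstdio>

int main() {
  const float kn = 4.0f;   // estimated kern (small) gap
  const float sp = 22.0f;  // estimated space (large) gap
  int16_t reqd_zero_width =
      static_cast<int16_t>(std::floor((sp - kn) / 3 + 0.5));  // rounded third: 6
  if (reqd_zero_width < 3) reqd_zero_width = 3;               // never below 3 empty bins
  std::printf("required run of empty histogram bins: %d\n", reqd_zero_width);
  return 0;
}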
@ -1493,9 +1493,9 @@ bool Textord::make_a_word_break(
/* Heuristics to turn dubious kerns to spaces */
/* TRIED THIS BUT IT MADE THINGS WORSE
if ( prev_gap == INT16_MAX )
if (prev_gap == INT16_MAX)
prev_gap = 0; // start of row
if ( next_gap == INT16_MAX )
if (next_gap == INT16_MAX)
next_gap = 0; // end of row
*/
if ((prev_blob_box.width () > 0) &&

View File

@ -251,9 +251,9 @@ static void WriteProtos(FILE* File, uint16_t N, LIST ProtoList,
// write prototypes
iterate(ProtoList)
{
Proto = (PROTOTYPE *) first_node ( ProtoList );
if (( Proto->Significant && WriteSigProtos ) ||
( ! Proto->Significant && WriteInsigProtos ) )
WritePrototype( File, N, Proto );
Proto = (PROTOTYPE*)first_node(ProtoList);
if ((Proto->Significant && WriteSigProtos) ||
(! Proto->Significant && WriteInsigProtos))
WritePrototype(File, N, Proto);
}
} // WriteProtos
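
The condition above writes a prototype only when its significance flag matches what the caller asked for; as a standalone predicate (an illustrative restatement, not the original code):

#include <cstdio>

static bool ShouldWritePrototype(bool significant, bool write_sig, bool write_insig) {
  return (significant && write_sig) || (!significant && write_insig);
}

int main() {
  // Writing only significant prototypes: a significant one passes, an
  // insignificant one does not.
  std::printf("%d %d\n", ShouldWritePrototype(true, true, false),
              ShouldWritePrototype(false, true, false));  // prints "1 0"
  return 0;
}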

View File

@ -23,7 +23,7 @@
#include "unicharset_training_utils.h"
STRING_PARAM_FLAG(input_unicharset, "",
"Unicharset to complete and use in encoding");
"Filename with unicharset to complete and use in encoding");
STRING_PARAM_FLAG(script_dir, "",
"Directory name for input script unicharsets");
STRING_PARAM_FLAG(words, "",
@ -40,26 +40,17 @@ BOOL_PARAM_FLAG(pass_through_recoder, false,
" unicharset. Otherwise, potentially a compression of it");
int main(int argc, char** argv) {
// Sets properties on the input unicharset file, and writes:
// rootdir/lang/lang.charset_size=ddd.txt
// rootdir/lang/lang.traineddata
// rootdir/lang/lang.unicharset
// If the 3 word lists are provided, the dawgs are also added
// to the traineddata file.
// The output unicharset and charset_size files are just for
// human readability.
tesseract::CheckSharedLibraryVersion();
tesseract::ParseCommandLineFlags(argv[0], &argc, &argv, true);
// Check validity of input flags.
if (FLAGS_input_unicharset.empty() || FLAGS_script_dir.empty() ||
FLAGS_output_dir.empty() || FLAGS_lang.empty()) {
tprintf("Usage: %s --input_unicharset filename --script_dir dirname\n",
argv[0]);
tprintf(" --output_dir rootdir --lang lang [--lang_is_rtl]\n");
tprintf(" [--words file --puncs file --numbers file]\n");
tprintf("Sets properties on the input unicharset file, and writes:\n");
tprintf("rootdir/lang/lang.charset_size=ddd.txt\n");
tprintf("rootdir/lang/lang.traineddata\n");
tprintf("rootdir/lang/lang.unicharset\n");
tprintf("If the 3 word lists are provided, the dawgs are also added to");
tprintf(" the traineddata file.\n");
tprintf("The output unicharset and charset_size files are just for human");
tprintf(" readability.\n");
exit(1);
}
GenericVector<STRING> words, puncs, numbers;
// If these reads fail, we get a warning message and an empty list of words.
tesseract::ReadFile(FLAGS_words.c_str(), nullptr).split('\n', &words);

View File

@ -851,11 +851,9 @@ void AddToNormProtosList(
int NumberOfProtos(LIST ProtoList, bool CountSigProtos,
bool CountInsigProtos) {
int N = 0;
PROTOTYPE* Proto;
iterate(ProtoList)
{
Proto = (PROTOTYPE *) first_node ( ProtoList );
PROTOTYPE* Proto = (PROTOTYPE*)first_node(ProtoList);
if ((Proto->Significant && CountSigProtos) ||
(!Proto->Significant && CountInsigProtos))
N++;

View File

@ -152,8 +152,8 @@ void PangoFontInfo::HardInitFontConfig(const std::string& fonts_dir,
#ifdef _WIN32
std::string env("FONTCONFIG_PATH=");
env.append(cache_dir_.c_str());
putenv(env.c_str());
putenv("LANG=en_US.utf8");
_putenv(env.c_str());
_putenv("LANG=en_US.utf8");
#else
setenv("FONTCONFIG_PATH", cache_dir_.c_str(), true);
// Fix the locale so that the reported font names are consistent.
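
The #ifdef above switches between the ISO C conformant _putenv on Windows and setenv elsewhere. A hypothetical portable helper showing the same pattern (not part of Tesseract's API):

#include <cstdlib>
#include <string>

static void set_env_var(const std::string& name, const std::string& value) {
#ifdef _WIN32
  // _putenv takes a single "NAME=value" string.
  std::string assignment = name + "=" + value;
  _putenv(assignment.c_str());
#else
  // setenv takes name and value separately; 1 means overwrite if present.
  setenv(name.c_str(), value.c_str(), 1);
#endif
}

int main() {
  set_env_var("FONTCONFIG_PATH", "/tmp/font-cache");  // example path only
  return 0;
}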

View File

@ -14,39 +14,47 @@
# Tesseract. For a detailed description of the phases, see
# https://github.com/tesseract-ocr/tesseract/wiki/TrainingTesseract
#
# USAGE:
#
# tesstrain.sh
# --fontlist FONTS # A list of fontnames to train on.
# --fonts_dir FONTS_PATH # Path to font files.
# --lang LANG_CODE # ISO 639 code.
# --langdata_dir DATADIR # Path to tesseract/training/langdata directory.
# --output_dir OUTPUTDIR # Location of output traineddata file.
# --save_box_tiff # Save box/tiff pairs along with lstmf files.
# --overwrite # Safe to overwrite files in output_dir.
# --linedata_only # Only generate training data for lstmtraining.
# --run_shape_clustering # Run shape clustering (use for Indic langs).
# --exposures EXPOSURES # A list of exposure levels to use (e.g. "-1 0 1").
#
# OPTIONAL flags for input data. If unspecified we will look for them in
# the langdata_dir directory.
# --training_text TEXTFILE # Text to render and use for training.
# --wordlist WORDFILE # Word list for the language ordered by
# # decreasing frequency.
#
# OPTIONAL flag to specify location of existing traineddata files, required
# during feature extraction. If unspecified will use TESSDATA_PREFIX defined in
# the current environment.
# --tessdata_dir TESSDATADIR # Path to tesseract/tessdata directory.
#
# NOTE:
# The font names specified in --fontlist need to be recognizable by Pango using
# fontconfig. An easy way to list the canonical names of all fonts available on
# your system is to run text2image with --list_available_fonts and the
# appropriate --fonts_dir path.
display_usage() {
echo -e "USAGE: tesstrain.sh
--fontlist FONTS # A list of fontnames to train on.
--fonts_dir FONTS_PATH # Path to font files.
--lang LANG_CODE # ISO 639 code.
--langdata_dir DATADIR # Path to tesseract/training/langdata directory.
--output_dir OUTPUTDIR # Location of output traineddata file.
--save_box_tiff # Save box/tiff pairs along with lstmf files.
--overwrite # Safe to overwrite files in output_dir.
--linedata_only # Only generate training data for lstmtraining.
--run_shape_clustering # Run shape clustering (use for Indic langs).
--exposures EXPOSURES # A list of exposure levels to use (e.g. "-1 0 1").
OPTIONAL flags for input data. If unspecified we will look for them in
the langdata_dir directory.
--training_text TEXTFILE # Text to render and use for training.
--wordlist WORDFILE # Word list for the language ordered by
# decreasing frequency.
OPTIONAL flag to specify location of existing traineddata files, required
during feature extraction. If unspecified will use TESSDATA_PREFIX defined in
the current environment.
--tessdata_dir TESSDATADIR # Path to tesseract/tessdata directory.
NOTE:
The font names specified in --fontlist need to be recognizable by Pango using
fontconfig. An easy way to list the canonical names of all fonts available on
your system is to run text2image with --list_available_fonts and the
appropriate --fonts_dir path."
}
source "$(dirname $0)/tesstrain_utils.sh"
if [[ "$1" == "--help" || "$1" == "-h" ]]; then
display_usage
exit 0
fi
if [ $# == 0 ]; then
display_usage
exit 1
fi
ARGV=("$@")
parse_flags

View File

@ -2,7 +2,8 @@
// File: apiexample_test.cc
// Description: API test for Tesseract using test fixtures and parameters.
// Tests for Devanagari, Latin and Arabic scripts are disabled by default.
// Disabled tests can be run when required by using the --gtest_also_run_disabled_tests argument.
// Disabled tests can be run when required by using the
// --gtest_also_run_disabled_tests argument.
// ./unittest/apiexample_test --gtest_also_run_disabled_tests
//
// Author: ShreeDevi Kumar
@ -21,91 +22,89 @@
// expects clone of tessdata_fast repo in ../../tessdata_fast
//#include "log.h"
#include "include_gunit.h"
#include "baseapi.h"
#include "leptonica/allheaders.h"
#include <iostream>
#include <string>
#include <fstream>
#include <locale>
#include <limits.h>
#include <time.h>
#include <fstream>
#include <iostream>
#include <locale>
#include <string>
#include "baseapi.h"
#include "include_gunit.h"
#include "leptonica/allheaders.h"
namespace {
class QuickTest : public testing::Test {
protected:
virtual void SetUp() {
start_time_ = time(nullptr);
}
virtual void SetUp() { start_time_ = time(nullptr); }
virtual void TearDown() {
const time_t end_time = time(nullptr);
EXPECT_TRUE(end_time - start_time_ <=55) << "The test took too long - " << ::testing::PrintToString(end_time - start_time_);
EXPECT_TRUE(end_time - start_time_ <= 55)
<< "The test took too long - "
<< ::testing::PrintToString(end_time - start_time_);
}
time_t start_time_;
};
};
void OCRTester(const char* imgname, const char* groundtruth, const char* tessdatadir, const char* lang) {
//log.info() << tessdatadir << " for language: " << lang << std::endl;
char *outText;
std::locale loc("C"); // You can also use "" for the default system locale
std::ifstream file(groundtruth);
file.imbue(loc); // Use it for file input
std::string gtText((std::istreambuf_iterator<char>(file)), std::istreambuf_iterator<char>());
tesseract::TessBaseAPI *api = new tesseract::TessBaseAPI();
ASSERT_FALSE(api->Init(tessdatadir, lang)) << "Could not initialize tesseract.";
Pix *image = pixRead(imgname);
ASSERT_TRUE(image != nullptr) << "Failed to read test image.";
api->SetImage(image);
outText = api->GetUTF8Text();
EXPECT_EQ(gtText,outText) << "Phototest.tif OCR does not match ground truth for " << ::testing::PrintToString(lang);
api->End();
delete [] outText;
pixDestroy(&image);
}
void OCRTester(const char* imgname, const char* groundtruth,
const char* tessdatadir, const char* lang) {
// log.info() << tessdatadir << " for language: " << lang << std::endl;
char* outText;
std::locale loc("C"); // You can also use "" for the default system locale
std::ifstream file(groundtruth);
file.imbue(loc); // Use it for file input
std::string gtText((std::istreambuf_iterator<char>(file)),
std::istreambuf_iterator<char>());
tesseract::TessBaseAPI* api = new tesseract::TessBaseAPI();
ASSERT_FALSE(api->Init(tessdatadir, lang))
<< "Could not initialize tesseract.";
Pix* image = pixRead(imgname);
ASSERT_TRUE(image != nullptr) << "Failed to read test image.";
api->SetImage(image);
outText = api->GetUTF8Text();
EXPECT_EQ(gtText, outText)
<< "Phototest.tif OCR does not match ground truth for "
<< ::testing::PrintToString(lang);
api->End();
delete[] outText;
pixDestroy(&image);
}
class MatchGroundTruth : public QuickTest ,
public ::testing::WithParamInterface<const char*> {
};
class MatchGroundTruth : public QuickTest,
public ::testing::WithParamInterface<const char*> {};
TEST_P(MatchGroundTruth, FastPhototestOCR) {
OCRTester(TESTING_DIR "/phototest.tif",
TESTING_DIR "/phototest.txt",
TESSDATA_DIR "_fast", GetParam());
}
TEST_P(MatchGroundTruth, FastPhototestOCR) {
OCRTester(TESTING_DIR "/phototest.tif", TESTING_DIR "/phototest.txt",
TESSDATA_DIR "_fast", GetParam());
}
TEST_P(MatchGroundTruth, BestPhototestOCR) {
OCRTester(TESTING_DIR "/phototest.tif",
TESTING_DIR "/phototest.txt",
TESSDATA_DIR "_best", GetParam());
}
TEST_P(MatchGroundTruth, BestPhototestOCR) {
OCRTester(TESTING_DIR "/phototest.tif", TESTING_DIR "/phototest.txt",
TESSDATA_DIR "_best", GetParam());
}
TEST_P(MatchGroundTruth, TessPhototestOCR) {
OCRTester(TESTING_DIR "/phototest.tif",
TESTING_DIR "/phototest.txt",
TESSDATA_DIR , GetParam());
}
TEST_P(MatchGroundTruth, TessPhototestOCR) {
OCRTester(TESTING_DIR "/phototest.tif", TESTING_DIR "/phototest.txt",
TESSDATA_DIR, GetParam());
}
INSTANTIATE_TEST_CASE_P( Eng, MatchGroundTruth,
::testing::Values("eng") );
INSTANTIATE_TEST_CASE_P( DISABLED_Latin, MatchGroundTruth,
::testing::Values("script/Latin") );
INSTANTIATE_TEST_CASE_P( DISABLED_Deva, MatchGroundTruth,
::testing::Values("script/Devanagari") );
INSTANTIATE_TEST_CASE_P( DISABLED_Arabic, MatchGroundTruth,
::testing::Values("script/Arabic") );
INSTANTIATE_TEST_CASE_P(Eng, MatchGroundTruth, ::testing::Values("eng"));
INSTANTIATE_TEST_CASE_P(DISABLED_Latin, MatchGroundTruth,
::testing::Values("script/Latin"));
INSTANTIATE_TEST_CASE_P(DISABLED_Deva, MatchGroundTruth,
::testing::Values("script/Devanagari"));
INSTANTIATE_TEST_CASE_P(DISABLED_Arabic, MatchGroundTruth,
::testing::Values("script/Arabic"));
class EuroText : public QuickTest {
};
class EuroText : public QuickTest {};
TEST_F(EuroText, FastLatinOCR) {
OCRTester(TESTING_DIR "/eurotext.tif",
TESTING_DIR "/eurotext.txt",
TESSDATA_DIR "_fast", "script/Latin");
}
TEST_F(EuroText, FastLatinOCR) {
OCRTester(TESTING_DIR "/eurotext.tif", TESTING_DIR "/eurotext.txt",
TESSDATA_DIR "_fast", "script/Latin");
}
// script/Latin for eurotext.tif does not match groundtruth
// for tessdata & tessdata_best.
// so do not test these here.
// script/Latin for eurotext.tif does not match groundtruth
// for tessdata & tessdata_best.
// so do not test these here.
} // namespace

View File

@ -31,19 +31,13 @@ class ApplyBoxTest : public testing::Test {
std::string TestDataNameToPath(const std::string& name) {
return file::JoinPath(TESTING_DIR, name);
}
std::string TessdataPath() {
return TESSDATA_DIR;
}
std::string TessdataPath() { return TESSDATA_DIR; }
std::string OutputNameToPath(const std::string& name) {
return file::JoinPath(FLAGS_test_tmpdir, name);
}
ApplyBoxTest() {
src_pix_ = NULL;
}
~ApplyBoxTest() {
pixDestroy(&src_pix_);
}
ApplyBoxTest() { src_pix_ = nullptr; }
~ApplyBoxTest() { pixDestroy(&src_pix_); }
void SetImage(const char* filename) {
pixDestroy(&src_pix_);
@ -67,35 +61,34 @@ class ApplyBoxTest : public testing::Test {
api_.SetVariable("tessedit_resegment_from_line_boxes", "1");
else
api_.SetVariable("tessedit_resegment_from_boxes", "1");
api_.Recognize(NULL);
api_.Recognize(nullptr);
char* ocr_text = api_.GetUTF8Text();
EXPECT_STREQ(truth_str, ocr_text);
delete [] ocr_text;
delete[] ocr_text;
// Test the boxes by reading the target box file in parallel with the
// bounding boxes in the ocr output.
std::string box_filename = TestDataNameToPath(target_box_file);
FILE* box_file = OpenBoxFile(STRING(box_filename.c_str()));
ASSERT_TRUE(box_file != NULL);
ASSERT_TRUE(box_file != nullptr);
int height = pixGetHeight(src_pix_);
ResultIterator* it = api_.GetIterator();
do {
int left, top, right, bottom;
EXPECT_TRUE(it->BoundingBox(tesseract::RIL_SYMBOL,
&left, &top, &right, &bottom));
TBOX ocr_box(ICOORD(left, height - bottom),
ICOORD(right, height - top));
EXPECT_TRUE(
it->BoundingBox(tesseract::RIL_SYMBOL, &left, &top, &right, &bottom));
TBOX ocr_box(ICOORD(left, height - bottom), ICOORD(right, height - top));
int line_number;
TBOX truth_box;
STRING box_text;
EXPECT_TRUE(ReadNextBox(0, &line_number, box_file, &box_text,
&truth_box));
EXPECT_TRUE(
ReadNextBox(0, &line_number, box_file, &box_text, &truth_box));
// Testing for major overlap is a bit weak, but if they all
// major overlap successfully, then it has to be fairly close.
EXPECT_TRUE(ocr_box.major_overlap(truth_box));
// Also check that the symbol text matches the box text.
char* symbol_text = it->GetUTF8Text(tesseract::RIL_SYMBOL);
EXPECT_STREQ(box_text.string(), symbol_text);
delete [] symbol_text;
delete[] symbol_text;
} while (it->Next(tesseract::RIL_SYMBOL));
delete it;
}
@ -107,14 +100,14 @@ class ApplyBoxTest : public testing::Test {
// Tests character-level applyboxes on normal Times New Roman.
TEST_F(ApplyBoxTest, TimesCharLevel) {
VerifyBoxesAndText("trainingtimes.tif", kTruthTextWords,
"trainingtimes.box", false);
VerifyBoxesAndText("trainingtimes.tif", kTruthTextWords, "trainingtimes.box",
false);
}
// Tests character-level applyboxes on italic Times New Roman.
TEST_F(ApplyBoxTest, ItalicCharLevel) {
VerifyBoxesAndText("trainingital.tif", kTruthTextWords,
"trainingital.box", false);
VerifyBoxesAndText("trainingital.tif", kTruthTextWords, "trainingital.box",
false);
}
// Tests line-level applyboxes on normal Times New Roman.
@ -125,8 +118,8 @@ TEST_F(ApplyBoxTest, TimesLineLevel) {
// Tests line-level applyboxes on italic Times New Roman.
TEST_F(ApplyBoxTest, ItalLineLevel) {
VerifyBoxesAndText("trainingitalline.tif", kTruthTextLine,
"trainingital.box", true);
VerifyBoxesAndText("trainingitalline.tif", kTruthTextLine, "trainingital.box",
true);
}
} // namespace

View File

@ -8,16 +8,16 @@
namespace {
using ::testing::HasSubstr;
using ::testing::ContainsRegex;
using ::testing::HasSubstr;
const char* langs[] = {"eng", "vie", "hin", "ara", NULL};
const char* langs[] = {"eng", "vie", "hin", "ara", nullptr};
const char* image_files[] = {"HelloGoogle.tif", "viet.tif", "raaj.tif",
"arabic.tif", NULL};
"arabic.tif", nullptr};
const char* gt_text[] = {"Hello Google", "\x74\x69\xe1\xba\xbf\x6e\x67",
"\xe0\xa4\xb0\xe0\xa4\xbe\xe0\xa4\x9c",
"\xd8\xa7\xd9\x84\xd8\xb9\xd8\xb1\xd8\xa8\xd9\x8a",
NULL};
nullptr};
class FriendlyTessBaseAPI : public tesseract::TessBaseAPI {
FRIEND_TEST(TesseractTest, LSTMGeometryTest);
@ -25,7 +25,7 @@ class FriendlyTessBaseAPI : public tesseract::TessBaseAPI {
string GetCleanedTextResult(tesseract::TessBaseAPI* tess, Pix* pix) {
tess->SetImage(pix);
char *result = tess->GetUTF8Text();
char* result = tess->GetUTF8Text();
string ocr_result = result;
delete[] result;
absl::StripAsciiWhitespace(&ocr_result);
@ -36,19 +36,18 @@ string GetCleanedTextResult(tesseract::TessBaseAPI* tess, Pix* pix) {
class TesseractTest : public testing::Test {
protected:
string TestDataNameToPath(const string& name) {
return file::JoinPath(FLAGS_test_srcdir,
"testdata/" + name);
return file::JoinPath(FLAGS_test_srcdir, "testdata/" + name);
}
string TessdataPath() {
return file::JoinPath(FLAGS_test_srcdir,
"tessdata");
return file::JoinPath(FLAGS_test_srcdir, "tessdata");
}
};
// Tests that array sizes match their intended size.
TEST_F(TesseractTest, ArraySizeTest) {
int size = 0;
for (size = 0; kPolyBlockNames[size][0] != '\0'; ++size);
for (size = 0; kPolyBlockNames[size][0] != '\0'; ++size)
;
EXPECT_EQ(size, PT_COUNT);
}
@ -58,7 +57,7 @@ TEST_F(TesseractTest, BasicTesseractTest) {
string truth_text;
string ocr_text;
api.Init(TessdataPath().c_str(), "eng", tesseract::OEM_TESSERACT_ONLY);
Pix *src_pix = pixRead(TestDataNameToPath("phototest.tif").c_str());
Pix* src_pix = pixRead(TestDataNameToPath("phototest.tif").c_str());
CHECK(src_pix);
ocr_text = GetCleanedTextResult(&api, src_pix);
CHECK_OK(file::GetContents(TestDataNameToPath("phototest.gold.txt"),
@ -75,15 +74,15 @@ TEST_F(TesseractTest, IteratesParagraphsEvenIfNotDetected) {
api.Init(TessdataPath().c_str(), "eng", tesseract::OEM_TESSERACT_ONLY);
api.SetPageSegMode(tesseract::PSM_SINGLE_BLOCK);
api.SetVariable("paragraph_debug_level", "3");
Pix *src_pix = pixRead(TestDataNameToPath("b622.png").c_str());
Pix* src_pix = pixRead(TestDataNameToPath("b622.png").c_str());
CHECK(src_pix);
api.SetImage(src_pix);
Boxa* para_boxes = api.GetComponentImages(tesseract::RIL_PARA,
true, NULL, NULL);
EXPECT_TRUE(para_boxes != NULL);
Boxa* block_boxes = api.GetComponentImages(tesseract::RIL_BLOCK,
true, NULL, NULL);
EXPECT_TRUE(block_boxes != NULL);
Boxa* para_boxes =
api.GetComponentImages(tesseract::RIL_PARA, true, nullptr, nullptr);
EXPECT_TRUE(para_boxes != nullptr);
Boxa* block_boxes =
api.GetComponentImages(tesseract::RIL_BLOCK, true, nullptr, nullptr);
EXPECT_TRUE(block_boxes != nullptr);
// TODO(eger): Get paragraphs out of this page pre-text.
EXPECT_GE(boxaGetCount(para_boxes), boxaGetCount(block_boxes));
boxaDestroy(&block_boxes);
@ -96,14 +95,14 @@ TEST_F(TesseractTest, IteratesParagraphsEvenIfNotDetected) {
TEST_F(TesseractTest, HOCRWorksWithoutSetInputName) {
tesseract::TessBaseAPI api;
api.Init(TessdataPath().c_str(), "eng", tesseract::OEM_TESSERACT_ONLY);
Pix *src_pix = pixRead(TestDataNameToPath("HelloGoogle.tif").c_str());
Pix* src_pix = pixRead(TestDataNameToPath("HelloGoogle.tif").c_str());
CHECK(src_pix);
api.SetImage(src_pix);
char *result = api.GetHOCRText(0);
EXPECT_TRUE(result != NULL);
char* result = api.GetHOCRText(0);
EXPECT_TRUE(result != nullptr);
EXPECT_THAT(result, HasSubstr("Hello"));
EXPECT_THAT(result, HasSubstr("<div class='ocr_page'"));
delete [] result;
delete[] result;
pixDestroy(&src_pix);
}
@ -111,16 +110,16 @@ TEST_F(TesseractTest, HOCRWorksWithoutSetInputName) {
TEST_F(TesseractTest, HOCRContainsBaseline) {
tesseract::TessBaseAPI api;
api.Init(TessdataPath().c_str(), "eng", tesseract::OEM_TESSERACT_ONLY);
Pix *src_pix = pixRead(TestDataNameToPath("HelloGoogle.tif").c_str());
Pix* src_pix = pixRead(TestDataNameToPath("HelloGoogle.tif").c_str());
CHECK(src_pix);
api.SetInputName("HelloGoogle.tif");
api.SetImage(src_pix);
char *result = api.GetHOCRText(0);
EXPECT_TRUE(result != NULL);
char* result = api.GetHOCRText(0);
EXPECT_TRUE(result != nullptr);
EXPECT_THAT(result, HasSubstr("Hello"));
EXPECT_THAT(result, ContainsRegex("<span class='ocr_line'[^>]* "
"baseline [-.0-9]+ [-.0-9]+"));
delete [] result;
delete[] result;
pixDestroy(&src_pix);
}
@ -131,13 +130,13 @@ TEST_F(TesseractTest, HOCRContainsBaseline) {
TEST_F(TesseractTest, RickSnyderNotFuckSnyder) {
tesseract::TessBaseAPI api;
api.Init(TessdataPath().c_str(), "eng", tesseract::OEM_TESSERACT_ONLY);
Pix *src_pix = pixRead(TestDataNameToPath("rick_snyder.jpeg").c_str());
Pix* src_pix = pixRead(TestDataNameToPath("rick_snyder.jpeg").c_str());
CHECK(src_pix);
api.SetImage(src_pix);
char *result = api.GetHOCRText(0);
EXPECT_TRUE(result != NULL);
char* result = api.GetHOCRText(0);
EXPECT_TRUE(result != nullptr);
EXPECT_THAT(result, Not(HasSubstr("FUCK")));
delete [] result;
delete[] result;
pixDestroy(&src_pix);
}
@ -146,19 +145,12 @@ TEST_F(TesseractTest, AdaptToWordStrTest) {
static const char* kTrainingPages[] = {
"136.tif", "256.tif", "410.tif", "432.tif", "540.tif",
"692.tif", "779.tif", "793.tif", "808.tif", "815.tif",
"12.tif", "12.tif", NULL
};
"12.tif", "12.tif", nullptr};
static const char* kTrainingText[] = {
"1 3 6", "2 5 6", "4 1 0", "4 3 2", "5 4 0",
"6 9 2", "7 7 9", "7 9 3", "8 0 8", "8 1 5",
"1 2", "1 2", NULL
};
static const char* kTestPages[] = {
"324.tif", "433.tif", "12.tif", NULL
};
static const char* kTestText[] = {
"324", "433", "12", NULL
};
"1 3 6", "2 5 6", "4 1 0", "4 3 2", "5 4 0", "6 9 2", "7 7 9",
"7 9 3", "8 0 8", "8 1 5", "1 2", "1 2", nullptr};
static const char* kTestPages[] = {"324.tif", "433.tif", "12.tif", nullptr};
static const char* kTestText[] = {"324", "433", "12", nullptr};
tesseract::TessBaseAPI api;
string truth_text;
string ocr_text;
@ -166,22 +158,22 @@ TEST_F(TesseractTest, AdaptToWordStrTest) {
api.SetVariable("matcher_sufficient_examples_for_prototyping", "1");
api.SetVariable("classify_class_pruner_threshold", "220");
// Train on the training text.
for (int i = 0; kTrainingPages[i] != NULL; ++i) {
for (int i = 0; kTrainingPages[i] != nullptr; ++i) {
string image_file = TestDataNameToPath(kTrainingPages[i]);
Pix *src_pix = pixRead(image_file.c_str());
Pix* src_pix = pixRead(image_file.c_str());
CHECK(src_pix);
api.SetImage(src_pix);
EXPECT_TRUE(api.AdaptToWordStr(tesseract::PSM_SINGLE_WORD,
kTrainingText[i]))
<< "Failed to adapt to text \"" << kTrainingText[i]
<< "\" on image " << image_file;
EXPECT_TRUE(
api.AdaptToWordStr(tesseract::PSM_SINGLE_WORD, kTrainingText[i]))
<< "Failed to adapt to text \"" << kTrainingText[i] << "\" on image "
<< image_file;
pixDestroy(&src_pix);
}
// Test the test text.
api.SetVariable("tess_bn_matching", "1");
api.SetPageSegMode(tesseract::PSM_SINGLE_WORD);
for (int i = 0; kTestPages[i] != NULL; ++i) {
Pix *src_pix = pixRead(TestDataNameToPath(kTestPages[i]).c_str());
for (int i = 0; kTestPages[i] != nullptr; ++i) {
Pix* src_pix = pixRead(TestDataNameToPath(kTestPages[i]).c_str());
CHECK(src_pix);
ocr_text = GetCleanedTextResult(&api, src_pix);
absl::StripAsciiWhitespace(&truth_text);
@ -196,7 +188,7 @@ TEST_F(TesseractTest, BasicLSTMTest) {
string truth_text;
string ocr_text;
api.Init(TessdataPath().c_str(), "eng", tesseract::OEM_LSTM_ONLY);
Pix *src_pix = pixRead(TestDataNameToPath("phototest_2.tif").c_str());
Pix* src_pix = pixRead(TestDataNameToPath("phototest_2.tif").c_str());
CHECK(src_pix);
ocr_text = GetCleanedTextResult(&api, src_pix);
CHECK_OK(file::GetContents(TestDataNameToPath("phototest.gold.txt"),
@ -213,22 +205,22 @@ TEST_F(TesseractTest, BasicLSTMTest) {
// errors due to float/int conversions (e.g., see OUTLINE::move() in
// ccstruct/poutline.h) Instead, we do a loose check.
TEST_F(TesseractTest, LSTMGeometryTest) {
Pix *src_pix = pixRead(TestDataNameToPath("deslant.tif").c_str());
Pix* src_pix = pixRead(TestDataNameToPath("deslant.tif").c_str());
FriendlyTessBaseAPI api;
api.Init(TessdataPath().c_str(), "eng", tesseract::OEM_LSTM_ONLY);
api.SetImage(src_pix);
ASSERT_EQ(api.Recognize(NULL), 0);
ASSERT_EQ(api.Recognize(nullptr), 0);
const PAGE_RES *page_res = api.GetPageRes();
PAGE_RES_IT page_res_it(const_cast<PAGE_RES *>(page_res));
const PAGE_RES* page_res = api.GetPageRes();
PAGE_RES_IT page_res_it(const_cast<PAGE_RES*>(page_res));
page_res_it.restart_page();
BLOCK* block = page_res_it.block()->block;
CHECK(block);
// extract word and character boxes for each word
for (page_res_it.restart_page(); page_res_it.word () != NULL;
for (page_res_it.restart_page(); page_res_it.word() != nullptr;
page_res_it.forward()) {
WERD_RES *word = page_res_it.word();
WERD_RES* word = page_res_it.word();
CHECK(word);
CHECK(word->best_choice);
CHECK_GT(word->best_choice->length(), 0);
@ -255,13 +247,13 @@ TEST_F(TesseractTest, LSTMGeometryTest) {
TEST_F(TesseractTest, InitConfigOnlyTest) {
// Languages for testing initialization.
const char* langs[] = { "eng", "chi_tra", "jpn", "vie", "hin"};
const char* langs[] = {"eng", "chi_tra", "jpn", "vie", "hin"};
std::unique_ptr<tesseract::TessBaseAPI> api;
CycleTimer timer;
for (int i = 0; i < ARRAYSIZE(langs); ++i) {
api.reset(new tesseract::TessBaseAPI);
timer.Restart();
EXPECT_EQ(0, api->Init(TessdataPath().c_str(), langs[i] ,
EXPECT_EQ(0, api->Init(TessdataPath().c_str(), langs[i],
tesseract::OEM_TESSERACT_ONLY));
timer.Stop();
LOG(INFO) << "Lang " << langs[i] << " took " << timer.GetInMs()
@ -275,9 +267,9 @@ TEST_F(TesseractTest, InitConfigOnlyTest) {
for (int i = 0; i < ARRAYSIZE(langs); ++i) {
api.reset(new tesseract::TessBaseAPI);
timer.Restart();
EXPECT_EQ(0, api->Init(TessdataPath().c_str(), langs[i] ,
tesseract::OEM_TESSERACT_ONLY, NULL, 0,
&vars_vec, &vars_values, false));
EXPECT_EQ(0, api->Init(TessdataPath().c_str(), langs[i],
tesseract::OEM_TESSERACT_ONLY, nullptr, 0, &vars_vec,
&vars_values, false));
timer.Stop();
LOG(INFO) << "Lang " << langs[i] << " took " << timer.GetInMs()
<< "ms in config-only init";
@ -292,19 +284,17 @@ TEST_F(TesseractTest, InitConfigOnlyTest) {
// OEM_DEFAULT mode.
TEST(TesseractInstanceTest, TestMultipleTessInstances) {
int num_langs = 0;
while (langs[num_langs] != NULL) ++num_langs;
while (langs[num_langs] != nullptr) ++num_langs;
const string kTessdataPath = file::JoinPath(
FLAGS_test_srcdir,"tessdata");
const string kTessdataPath = file::JoinPath(FLAGS_test_srcdir, "tessdata");
// Preload images and verify that OCR is correct on them individually.
std::vector<Pix *> pix(num_langs);
std::vector<Pix*> pix(num_langs);
for (int i = 0; i < num_langs; ++i) {
SCOPED_TRACE(absl::StrCat("Single instance test with lang = ", langs[i]));
string path = FLAGS_test_srcdir
+ "/testdata/" + image_files[i];
string path = FLAGS_test_srcdir + "/testdata/" + image_files[i];
pix[i] = pixRead(path.c_str());
QCHECK(pix[i] != NULL) << "Could not read " << path;
QCHECK(pix[i] != nullptr) << "Could not read " << path;
tesseract::TessBaseAPI tess;
EXPECT_EQ(0, tess.Init(kTessdataPath.c_str(), langs[i]));
@ -329,32 +319,30 @@ TEST(TesseractInstanceTest, TestMultipleTessInstances) {
}
}
for (int i = 0; i < num_langs; ++i)
pixDestroy(&pix[i]);
for (int i = 0; i < num_langs; ++i) pixDestroy(&pix[i]);
}
// Tests whether Tesseract parameters are correctly set for the two instances.
TEST(TesseractInstanceTest, TestMultipleTessInstanceVariables) {
string illegal_name = "an_illegal_name";
string langs[2] = { "eng", "hin" };
string langs[2] = {"eng", "hin"};
string int_param_name = "tessedit_pageseg_mode";
int int_param[2] = { 1, 2 };
string int_param_str[2] = { "1", "2" };
int int_param[2] = {1, 2};
string int_param_str[2] = {"1", "2"};
string bool_param_name = "tessedit_ambigs_training";
bool bool_param[2] = { false, true };
string bool_param_str[2] = { "F", "T" };
bool bool_param[2] = {false, true};
string bool_param_str[2] = {"F", "T"};
string str_param_name = "tessedit_char_blacklist";
string str_param[2] = { "abc", "def" };
string str_param[2] = {"abc", "def"};
string double_param_name = "segment_penalty_dict_frequent_word";
string double_param_str[2] = { "0.01", "2" };
double double_param[2] = { 0.01, 2 };
string double_param_str[2] = {"0.01", "2"};
double double_param[2] = {0.01, 2};
const string kTessdataPath = file::JoinPath(
FLAGS_test_srcdir,"tessdata");
const string kTessdataPath = file::JoinPath(FLAGS_test_srcdir, "tessdata");
tesseract::TessBaseAPI tess1, tess2;
for (int i = 0; i < 2; ++i) {
tesseract::TessBaseAPI *api = (i == 0) ? &tess1 : &tess2;
tesseract::TessBaseAPI* api = (i == 0) ? &tess1 : &tess2;
api->Init(kTessdataPath.c_str(), langs[i].c_str());
api->SetVariable(illegal_name.c_str(), "none");
api->SetVariable(int_param_name.c_str(), int_param_str[i].c_str());
@ -363,7 +351,7 @@ TEST(TesseractInstanceTest, TestMultipleTessInstanceVariables) {
api->SetVariable(double_param_name.c_str(), double_param_str[i].c_str());
}
for (int i = 0; i < 2; ++i) {
tesseract::TessBaseAPI *api = (i == 0) ? &tess1 : &tess2;
tesseract::TessBaseAPI* api = (i == 0) ? &tess1 : &tess2;
EXPECT_FALSE(api->GetStringVariable(illegal_name.c_str()));
int intvar;
EXPECT_TRUE(api->GetIntVariable(int_param_name.c_str(), &intvar));

View File

@ -37,18 +37,16 @@ using tesseract::TessBaseAPI;
namespace {
const char* kTessLangs[] = { "eng", "vie", NULL };
const char* kTessImages[] = { "HelloGoogle.tif", "viet.tif", NULL };
const char* kTessTruthText[] = { "Hello Google", "\x74\x69\xe1\xba\xbf\x6e\x67",
NULL };
const char* kTessLangs[] = {"eng", "vie", nullptr};
const char* kTessImages[] = {"HelloGoogle.tif", "viet.tif", nullptr};
const char* kTessTruthText[] = {"Hello Google", "\x74\x69\xe1\xba\xbf\x6e\x67",
nullptr};
const char* kCubeLangs[] = { "hin", "ara", NULL };
const char* kCubeImages[] = { "raaj.tif", "arabic.tif", NULL};
const char* kCubeLangs[] = {"hin", "ara", nullptr};
const char* kCubeImages[] = {"raaj.tif", "arabic.tif", nullptr};
const char* kCubeTruthText[] = {
"\xe0\xa4\xb0\xe0\xa4\xbe\xe0\xa4\x9c",
"\xd8\xa7\xd9\x84\xd8\xb9\xd8\xb1\xd8\xa8\xd9\x8a",
NULL};
"\xe0\xa4\xb0\xe0\xa4\xbe\xe0\xa4\x9c",
"\xd8\xa7\xd9\x84\xd8\xb9\xd8\xb1\xd8\xa8\xd9\x8a", nullptr};
class BaseapiThreadTest : public ::testing::Test {
protected:
@ -85,16 +83,16 @@ class BaseapiThreadTest : public ::testing::Test {
// and so entirely disallow concurrent access of a Pix instance.
const int n = num_langs_ * FLAGS_reps;
for (int i = 0; i < n; ++i) {
string path = FLAGS_test_srcdir +
"/testdata/" +
image_files[i % num_langs_];
string path =
FLAGS_test_srcdir + "/testdata/" + image_files[i % num_langs_];
Pix* new_pix = pixRead(path.c_str());
QCHECK(new_pix != NULL) << "Could not read " << path;
QCHECK(new_pix != nullptr) << "Could not read " << path;
pix_.push_back(new_pix);
}
pool_size_ = (FLAGS_max_concurrent_instances < 1) ?
num_langs_ * FLAGS_reps : FLAGS_max_concurrent_instances;
pool_size_ = (FLAGS_max_concurrent_instances < 1)
? num_langs_ * FLAGS_reps
: FLAGS_max_concurrent_instances;
}
static void TearDownTestCase() {
@ -108,9 +106,7 @@ class BaseapiThreadTest : public ::testing::Test {
pool_->StartWorkers();
}
void WaitForPoolWorkers() {
pool_.reset(NULL);
}
void WaitForPoolWorkers() { pool_.reset(nullptr); }
std::unique_ptr<ThreadPool> pool_;
static int pool_size_;
@ -127,25 +123,23 @@ std::vector<string> BaseapiThreadTest::langs_;
std::vector<string> BaseapiThreadTest::gt_text_;
int BaseapiThreadTest::num_langs_;
void InitTessInstance(TessBaseAPI* tess, const string& lang) {
CHECK(tess != nullptr);
const string kTessdataPath = file::JoinPath(
FLAGS_test_srcdir, "tessdata");
const string kTessdataPath = file::JoinPath(FLAGS_test_srcdir, "tessdata");
EXPECT_EQ(0, tess->Init(kTessdataPath.c_str(), lang.c_str()));
}
void GetCleanedText(TessBaseAPI* tess, Pix* pix, string* ocr_text) {
tess->SetImage(pix);
char *result = tess->GetUTF8Text();
char* result = tess->GetUTF8Text();
*ocr_text = result;
delete[] result;
absl::StripAsciiWhitespace(ocr_text);
}
void VerifyTextResult(TessBaseAPI* tess, Pix* pix, const string& lang,
const string& expected_text) {
TessBaseAPI *tess_local = NULL;
const string& expected_text) {
TessBaseAPI* tess_local = nullptr;
if (tess) {
tess_local = tess;
} else {
@ -155,11 +149,9 @@ void VerifyTextResult(TessBaseAPI* tess, Pix* pix, const string& lang,
string ocr_text;
GetCleanedText(tess_local, pix, &ocr_text);
EXPECT_STREQ(expected_text.c_str(), ocr_text.c_str());
if (tess_local != tess)
delete tess_local;
if (tess_local != tess) delete tess_local;
}
// Check that Tesseract/Cube produce the correct results in single-threaded
// operation. If not, it is pointless to run the real multi-threaded tests.
TEST_F(BaseapiThreadTest, TestBasicSanity) {
@ -205,7 +197,7 @@ TEST_F(BaseapiThreadTest, TestAll) {
const int n = num_langs_ * FLAGS_reps;
ResetPool();
for (int i = 0; i < n; ++i) {
pool_->Add(NewCallback(VerifyTextResult, NULL, pix_[i],
pool_->Add(NewCallback(VerifyTextResult, nullptr, pix_[i],
langs_[i % num_langs_], gt_text_[i % num_langs_]));
}
WaitForPoolWorkers();

View File

@ -34,13 +34,11 @@ class BitVectorTest : public testing::Test {
TestAll(*map, false);
map->SetBit(2);
// Set all the odds to true.
for (int i = 3; i <= kPrimeLimit; i += 2)
map->SetValue(i, true);
for (int i = 3; i <= kPrimeLimit; i += 2) map->SetValue(i, true);
int factor_limit = static_cast<int>(sqrt(1.0 + kPrimeLimit));
for (int f = 3; f <= factor_limit; f += 2) {
if (map->At(f)) {
for (int m = 2; m * f <= kPrimeLimit; ++m)
map->ResetBit(f * m);
for (int m = 2; m * f <= kPrimeLimit; ++m) map->ResetBit(f * m);
}
}
}

View File

@ -16,25 +16,14 @@
// If this test fails to compile, clean up the includes in baseapi.h!
// They are not supposed to drag in definitions of any of the tesseract
// types included in this enum!
enum NameTester {
ABORT,
OKAY,
LOG,
BLOB,
ELIST,
TBOX,
TPOINT,
WORD
};
enum NameTester { ABORT, OKAY, LOG, BLOB, ELIST, TBOX, TPOINT, WORD };
#define ERRCODE_H // avoid redefinition of ABORT in errcode.h
#define ERRCODE_H // avoid redefinition of ABORT in errcode.h
#include "include_gunit.h"
namespace {
// Verifies that the global namespace is clean.
TEST(CleanNamespaceTess, DummyTest) {
tesseract::TessBaseAPI api;
}
TEST(CleanNamespaceTess, DummyTest) { tesseract::TessBaseAPI api; }
} // namespace.

View File

@ -27,11 +27,9 @@ class TestableColPartition : public ColPartition {
class ColPartitionTest : public testing::Test {
protected:
void SetUp() {
}
void SetUp() {}
void TearDown() {
}
void TearDown() {}
};
TEST_F(ColPartitionTest, IsInSameColumnAsReflexive) {
@ -75,4 +73,4 @@ TEST_F(ColPartitionTest, IsInSameColumnAsPartialOverlap) {
EXPECT_TRUE(b.IsInSameColumnAs(a));
}
} // namespace
} // namespace

View File

@ -41,10 +41,9 @@ class CommandlineflagsTest : public ::testing::Test {
}
};
TEST_F(CommandlineflagsTest, RemoveFlags) {
const char* const_argv[] = { "Progname", "--foo_int", "3",
"file1.h", "file2.h" };
const char* const_argv[] = {"Progname", "--foo_int", "3", "file1.h",
"file2.h"};
int argc = ARRAYSIZE(const_argv);
char** argv = const_cast<char**>(const_argv);
tesseract::ParseCommandLineFlags(argv[0], &argc, &argv, true);
@ -56,7 +55,7 @@ TEST_F(CommandlineflagsTest, RemoveFlags) {
EXPECT_STREQ("file2.h", argv[2]);
}
#if 0 // TODO: this test needs an update (it currently fails).
#if 0 // TODO: this test needs an update (it currently fails).
TEST_F(CommandlineflagsTest, PrintUsageAndExit) {
const char* argv[] = { "Progname", "--help" };
EXPECT_EXIT(TestParser("Progname [flags]", ARRAYSIZE(argv), argv),
@ -66,66 +65,65 @@ TEST_F(CommandlineflagsTest, PrintUsageAndExit) {
#endif
TEST_F(CommandlineflagsTest, ExitsWithErrorOnInvalidFlag) {
const char* argv[] = { "", "--test_nonexistent_flag" };
EXPECT_EXIT(TestParser(ARRAYSIZE(argv), argv),
::testing::ExitedWithCode(1),
const char* argv[] = {"", "--test_nonexistent_flag"};
EXPECT_EXIT(TestParser(ARRAYSIZE(argv), argv), ::testing::ExitedWithCode(1),
"ERROR: Non-existent flag");
}
TEST_F(CommandlineflagsTest, ParseIntegerFlags) {
const char* argv[] = { "", "--foo_int=3", "--bar_int", "-4" };
const char* argv[] = {"", "--foo_int=3", "--bar_int", "-4"};
TestParser(ARRAYSIZE(argv), argv);
EXPECT_EQ(3, FLAGS_foo_int);
EXPECT_EQ(-4, FLAGS_bar_int);
const char* arg_no_value[] = { "", "--bar_int" };
const char* arg_no_value[] = {"", "--bar_int"};
EXPECT_EXIT(TestParser(ARRAYSIZE(arg_no_value), arg_no_value),
::testing::ExitedWithCode(1), "ERROR");
const char* arg_invalid_value[] = { "", "--bar_int", "--foo_int=3" };
const char* arg_invalid_value[] = {"", "--bar_int", "--foo_int=3"};
EXPECT_EXIT(TestParser(ARRAYSIZE(arg_invalid_value), arg_invalid_value),
::testing::ExitedWithCode(1), "ERROR");
const char* arg_bad_format[] = { "", "--bar_int=" };
const char* arg_bad_format[] = {"", "--bar_int="};
EXPECT_EXIT(TestParser(ARRAYSIZE(arg_bad_format), arg_bad_format),
::testing::ExitedWithCode(1), "ERROR");
}
TEST_F(CommandlineflagsTest, ParseDoubleFlags) {
const char* argv[] = { "", "--foo_double=3.14", "--bar_double", "1.2" };
const char* argv[] = {"", "--foo_double=3.14", "--bar_double", "1.2"};
TestParser(ARRAYSIZE(argv), argv);
EXPECT_EQ(3.14, FLAGS_foo_double);
EXPECT_EQ(1.2, FLAGS_bar_double);
const char* arg_no_value[] = { "", "--bar_double" };
EXPECT_EXIT(TestParser(2, arg_no_value),
::testing::ExitedWithCode(1), "ERROR");
const char* arg_no_value[] = {"", "--bar_double"};
EXPECT_EXIT(TestParser(2, arg_no_value), ::testing::ExitedWithCode(1),
"ERROR");
const char* arg_bad_format[] = { "", "--bar_double=" };
EXPECT_EXIT(TestParser(2, arg_bad_format),
::testing::ExitedWithCode(1), "ERROR");
const char* arg_bad_format[] = {"", "--bar_double="};
EXPECT_EXIT(TestParser(2, arg_bad_format), ::testing::ExitedWithCode(1),
"ERROR");
}
TEST_F(CommandlineflagsTest, ParseStringFlags) {
const char* argv[] = { "", "--foo_string=abc", "--bar_string", "def" };
const char* argv[] = {"", "--foo_string=abc", "--bar_string", "def"};
TestParser(ARRAYSIZE(argv), argv);
EXPECT_STREQ("abc", FLAGS_foo_string.c_str());
EXPECT_STREQ("def", FLAGS_bar_string.c_str());
const char* arg_no_value[] = { "", "--bar_string" };
EXPECT_EXIT(TestParser(2, arg_no_value),
::testing::ExitedWithCode(1), "ERROR");
const char* arg_no_value[] = {"", "--bar_string"};
EXPECT_EXIT(TestParser(2, arg_no_value), ::testing::ExitedWithCode(1),
"ERROR");
FLAGS_bar_string.set_value("bar");
const char* arg_empty_string[] = { "", "--bar_string=" };
const char* arg_empty_string[] = {"", "--bar_string="};
TestParser(2, arg_empty_string);
EXPECT_STREQ("", FLAGS_bar_string.c_str());
}
TEST_F(CommandlineflagsTest, ParseBoolFlags) {
const char* argv[] = { "", "--foo_bool=true", "--bar_bool=1" };
const char* argv[] = {"", "--foo_bool=true", "--bar_bool=1"};
FLAGS_foo_bool.set_value(false);
FLAGS_bar_bool.set_value(false);
TestParser(ARRAYSIZE(argv), argv);
@ -133,7 +131,7 @@ TEST_F(CommandlineflagsTest, ParseBoolFlags) {
EXPECT_TRUE(FLAGS_foo_bool);
EXPECT_TRUE(FLAGS_bar_bool);
const char* inv_argv[] = { "", "--foo_bool=false", "--bar_bool=0" };
const char* inv_argv[] = {"", "--foo_bool=false", "--bar_bool=0"};
FLAGS_foo_bool.set_value(true);
FLAGS_bar_bool.set_value(true);
TestParser(3, inv_argv);
@ -141,19 +139,19 @@ TEST_F(CommandlineflagsTest, ParseBoolFlags) {
EXPECT_FALSE(FLAGS_foo_bool);
EXPECT_FALSE(FLAGS_bar_bool);
const char* arg_implied_true[] = { "", "--bar_bool" };
const char* arg_implied_true[] = {"", "--bar_bool"};
FLAGS_bar_bool.set_value(false);
TestParser(2, arg_implied_true);
EXPECT_TRUE(FLAGS_bar_bool);
const char* arg_missing_val[] = { "", "--bar_bool=" };
EXPECT_EXIT(TestParser(2, arg_missing_val),
::testing::ExitedWithCode(1), "ERROR");
const char* arg_missing_val[] = {"", "--bar_bool="};
EXPECT_EXIT(TestParser(2, arg_missing_val), ::testing::ExitedWithCode(1),
"ERROR");
}
TEST_F(CommandlineflagsTest, ParseOldFlags) {
EXPECT_STREQ("", FLAGS_q.c_str());
const char* argv[] = { "", "-q", "text" };
const char* argv[] = {"", "-q", "text"};
TestParser(ARRAYSIZE(argv), argv);
EXPECT_STREQ("text", FLAGS_q.c_str());
}

View File

@ -5,20 +5,20 @@
#include "util/process/subprocess.h"
#include "tesseract/dict/trie.h"
#include "tesseract/ccutil/unicharset.h"
#include "tesseract/ccstruct/ratngs.h"
#include "tesseract/ccutil/unicharset.h"
#include "tesseract/dict/trie.h"
namespace {
void RemoveTrailingLineTerminators(char *line) {
char *end = line + strlen(line) - 1;
void RemoveTrailingLineTerminators(char* line) {
char* end = line + strlen(line) - 1;
while (end >= line && ('\n' == *end || '\r' == *end)) {
*end-- = 0;
}
}
void AddLineToSet(std::set<string> *words, char *line) {
void AddLineToSet(std::set<string>* words, char* line) {
RemoveTrailingLineTerminators(line);
words->insert(line);
}
@ -27,7 +27,7 @@ void AddLineToSet(std::set<string> *words, char *line) {
// aka Directed Acyclic Word Graphs).
class DawgTest : public testing::Test {
protected:
void LoadWordlist(const string &filename, std::set<string> *words) const {
void LoadWordlist(const string& filename, std::set<string>* words) const {
FileLineReader::Options options;
options.set_comment_char(0);
FileLineReader flr(filename.c_str(), options);
@ -35,8 +35,7 @@ class DawgTest : public testing::Test {
flr.Reload();
}
string TestDataNameToPath(const string& name) const {
return file::JoinPath(FLAGS_test_srcdir,
"testdata/" + name);
return file::JoinPath(FLAGS_test_srcdir, "testdata/" + name);
}
string TessBinaryPath(const string& binary_name) const {
return file::JoinPath(FLAGS_test_srcdir,
@ -44,10 +43,8 @@ class DawgTest : public testing::Test {
string OutputNameToPath(const string& name) const {
return file::JoinPath(FLAGS_test_tmpdir, name);
}
int RunCommand(const string &program,
const string &arg1,
const string &arg2,
const string &arg3) const {
int RunCommand(const string& program, const string& arg1, const string& arg2,
const string& arg3) const {
SubProcess p;
std::vector<string> argv;
argv.push_back(program);
@ -62,8 +59,8 @@ class DawgTest : public testing::Test {
// Test that we are able to convert a wordlist file (one "word" per line) to
// a dawg (a compressed format) and then extract the original wordlist back
// out using the tools "wordlist2dawg" and "dawg2wordlist."
void TestDawgRoundTrip(const string &unicharset_filename,
const string &wordlist_filename) const {
void TestDawgRoundTrip(const string& unicharset_filename,
const string& wordlist_filename) const {
std::set<string> orig_words, roundtrip_words;
string unicharset = TestDataNameToPath(unicharset_filename);
string orig_wordlist = TestDataNameToPath(wordlist_filename);
@ -71,8 +68,7 @@ class DawgTest : public testing::Test {
string output_wordlist = OutputNameToPath(wordlist_filename);
LoadWordlist(orig_wordlist, &orig_words);
EXPECT_EQ(
RunCommand("wordlist2dawg", orig_wordlist, output_dawg, unicharset),
0);
RunCommand("wordlist2dawg", orig_wordlist, output_dawg, unicharset), 0);
EXPECT_EQ(
RunCommand("dawg2wordlist", unicharset, output_dawg, output_wordlist),
0);

View File

@ -18,11 +18,9 @@ namespace {
class DENORMTest : public testing::Test {
public:
void SetUp() {
}
void SetUp() {}
void TearDown() {
}
void TearDown() {}
void ExpectCorrectTransform(const DENORM& denorm, const TPOINT& src,
const TPOINT& result, bool local) {
@ -30,7 +28,7 @@ class DENORMTest : public testing::Test {
if (local)
denorm.LocalNormTransform(src, &normed);
else
denorm.NormTransform(NULL, src, &normed);
denorm.NormTransform(nullptr, src, &normed);
EXPECT_EQ(result.x, normed.x);
EXPECT_EQ(result.y, normed.y);
// Now undo
@ -38,7 +36,7 @@ class DENORMTest : public testing::Test {
if (local)
denorm.LocalDenormTransform(normed, &denormed);
else
denorm.DenormTransform(NULL, normed, &denormed);
denorm.DenormTransform(nullptr, normed, &denormed);
EXPECT_EQ(src.x, denormed.x);
EXPECT_EQ(src.y, denormed.y);
}
@ -47,8 +45,7 @@ class DENORMTest : public testing::Test {
// Tests a simple baseline-style normalization.
TEST_F(DENORMTest, NoRotations) {
DENORM denorm;
denorm.SetupNormalization(NULL, NULL, NULL,
1000.0f, 2000.0f, 2.0f, 3.0f,
denorm.SetupNormalization(nullptr, nullptr, nullptr, 1000.0f, 2000.0f, 2.0f, 3.0f,
0.0f, static_cast<float>(kBlnBaselineOffset));
TPOINT pt1(1100, 2000);
TPOINT result1(200, kBlnBaselineOffset);
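For the parameters above, and assuming the usual affine form normed = (src - origin) * scale + final_shift, the expected point works out as x: (1100 - 1000) * 2.0 + 0 = 200 and y: (2000 - 2000) * 3.0 + kBlnBaselineOffset = kBlnBaselineOffset, which matches result1; the DENORM internals themselves are not shown in this hunk.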
@ -64,9 +61,8 @@ TEST_F(DENORMTest, NoRotations) {
TEST_F(DENORMTest, WithRotations) {
DENORM denorm;
FCOORD rotation90(0.0f, 1.0f);
denorm.SetupNormalization(NULL, &rotation90, NULL,
1000.0f, 2000.0f, 2.0f, 3.0f,
0.0f, static_cast<float>(kBlnBaselineOffset));
denorm.SetupNormalization(nullptr, &rotation90, nullptr, 1000.0f, 2000.0f, 2.0f,
3.0f, 0.0f, static_cast<float>(kBlnBaselineOffset));
TPOINT pt1(1100, 2000);
TPOINT result1(0, 200 + kBlnBaselineOffset);
@ -81,14 +77,13 @@ TEST_F(DENORMTest, WithRotations) {
// Tests a simple baseline-style normalization with a second rotation & scale.
TEST_F(DENORMTest, Multiple) {
DENORM denorm;
denorm.SetupNormalization(NULL, NULL, NULL,
1000.0f, 2000.0f, 2.0f, 3.0f,
denorm.SetupNormalization(nullptr, nullptr, nullptr, 1000.0f, 2000.0f, 2.0f, 3.0f,
0.0f, static_cast<float>(kBlnBaselineOffset));
DENORM denorm2;
FCOORD rotation90(0.0f, 1.0f);
denorm2.SetupNormalization(NULL, &rotation90, &denorm,
128.0f, 128.0f, 0.5f, 0.25f, 0.0f, 0.0f);
denorm2.SetupNormalization(nullptr, &rotation90, &denorm, 128.0f, 128.0f, 0.5f,
0.25f, 0.0f, 0.0f);
TPOINT pt1(1050, 2000);
TPOINT result1(100, kBlnBaselineOffset);
ExpectCorrectTransform(denorm, pt1, result1, true);

View File

@ -13,9 +13,8 @@ namespace tesseract {
class TestableEquationDetect : public EquationDetect {
public:
TestableEquationDetect(const char* tessdata,
Tesseract* lang_tesseract) :
EquationDetect(tessdata, "equ") {
TestableEquationDetect(const char* tessdata, Tesseract* lang_tesseract)
: EquationDetect(tessdata, "equ") {
SetLangTesseract(lang_tesseract);
}
@ -26,46 +25,44 @@ class TestableEquationDetect : public EquationDetect {
CHECK_LE(math_blobs + digit_blobs, total_blobs);
int count = 0;
for (int i = 0; i < math_blobs; i++, count++) {
BLOBNBOX *blob = new BLOBNBOX();
BLOBNBOX* blob = new BLOBNBOX();
blob->set_special_text_type(BSTT_MATH);
part->AddBox(blob);
}
for (int i = 0; i < digit_blobs; i++, count++) {
BLOBNBOX *blob = new BLOBNBOX();
BLOBNBOX* blob = new BLOBNBOX();
blob->set_special_text_type(BSTT_DIGIT);
part->AddBox(blob);
}
for (int i = count; i < total_blobs; i++) {
BLOBNBOX *blob = new BLOBNBOX();
BLOBNBOX* blob = new BLOBNBOX();
blob->set_special_text_type(BSTT_NONE);
part->AddBox(blob);
}
}
// Set up pix_binary for lang_tesseract_.
void SetPixBinary(Pix *pix) {
void SetPixBinary(Pix* pix) {
CHECK_EQ(1, pixGetDepth(pix));
*(lang_tesseract_->mutable_pix_binary()) = pix;
}
void RunIdentifySpecialText(BLOBNBOX*blob, const int height_th) {
void RunIdentifySpecialText(BLOBNBOX* blob, const int height_th) {
IdentifySpecialText(blob, height_th);
}
BlobSpecialTextType RunEstimateTypeForUnichar(
const char*val) {
BlobSpecialTextType RunEstimateTypeForUnichar(const char* val) {
const UNICHARSET& unicharset = lang_tesseract_->unicharset;
return EstimateTypeForUnichar(unicharset, unicharset.unichar_to_id(val));
}
EquationDetect::IndentType RunIsIndented(
ColPartitionGrid* part_grid, ColPartition* part) {
EquationDetect::IndentType RunIsIndented(ColPartitionGrid* part_grid,
ColPartition* part) {
this->part_grid_ = part_grid;
return IsIndented(part);
}
bool RunIsNearSmallNeighbor(const TBOX& seed_box,
const TBOX& part_box) {
bool RunIsNearSmallNeighbor(const TBOX& seed_box, const TBOX& part_box) {
return IsNearSmallNeighbor(seed_box, part_box);
}
@ -108,38 +105,34 @@ class EquationFinderTest : public testing::Test {
string testdata_dir_;
void SetUp() {
string tessdata_dir = file::JoinPath(
FLAGS_test_srcdir, "tessdata");
string tessdata_dir = file::JoinPath(FLAGS_test_srcdir, "tessdata");
tesseract_.reset(new Tesseract());
tesseract_->init_tesseract(tessdata_dir.c_str(), "eng", OEM_TESSERACT_ONLY);
tesseract_->set_source_resolution(300);
equation_det_.reset(new TestableEquationDetect(
tessdata_dir.c_str(), tesseract_.get()));
equation_det_.reset(
new TestableEquationDetect(tessdata_dir.c_str(), tesseract_.get()));
equation_det_->SetResolution(300);
testdata_dir_ = file::JoinPath(
FLAGS_test_srcdir, "testdata");
testdata_dir_ = file::JoinPath(FLAGS_test_srcdir, "testdata");
}
void TearDown() {
tesseract_.reset(NULL);
equation_det_.reset(NULL);
tesseract_.reset(nullptr);
equation_det_.reset(nullptr);
}
// Add a BLOCK covering the whole page.
void AddPageBlock(Pix* pix,
BLOCK_LIST* blocks) {
void AddPageBlock(Pix* pix, BLOCK_LIST* blocks) {
CHECK(pix != nullptr);
CHECK(blocks != nullptr);
BLOCK_IT block_it(blocks);
BLOCK* block = new BLOCK("", TRUE, 0, 0, 0, 0,
pixGetWidth(pix), pixGetHeight(pix));
BLOCK* block =
new BLOCK("", TRUE, 0, 0, 0, 0, pixGetWidth(pix), pixGetHeight(pix));
block_it.add_to_end(block);
}
// Create col partitions, add into part_grid, and put them into all_parts.
void CreateColParts(const int rows,
const int cols,
void CreateColParts(const int rows, const int cols,
ColPartitionGrid* part_grid,
std::vector<ColPartition*>* all_parts) {
const int kWidth = 10, kHeight = 10;
@ -148,8 +141,8 @@ class EquationFinderTest : public testing::Test {
for (int x = 0; x < cols; ++x) {
int left = x * kWidth * 2, bottom = y * kHeight * 2;
TBOX box(left, bottom, left + kWidth, bottom + kHeight);
ColPartition* part = ColPartition::FakePartition(
box, PT_FLOWING_TEXT, BRT_TEXT, BTFT_NONE);
ColPartition* part = ColPartition::FakePartition(box, PT_FLOWING_TEXT,
BRT_TEXT, BTFT_NONE);
part_grid->InsertBBox(true, true, part);
all_parts->push_back(part);
}
@ -159,15 +152,14 @@ class EquationFinderTest : public testing::Test {
void ClearParts(std::vector<ColPartition*>* all_parts) {
for (int i = 0; i < all_parts->size(); ++i) {
(*all_parts)[i]->DeleteBoxes();
delete((*all_parts)[i]);
delete ((*all_parts)[i]);
}
}
// Create a BLOBNBOX object with bounding box tbox, and add it into part.
void AddBlobIntoPart(const TBOX& tbox,
ColPartition* part) {
void AddBlobIntoPart(const TBOX& tbox, ColPartition* part) {
CHECK(part != nullptr);
BLOBNBOX *blob = new BLOBNBOX();
BLOBNBOX* blob = new BLOBNBOX();
blob->set_bounding_box(tbox);
part->AddBox(blob);
}
@ -176,8 +168,8 @@ class EquationFinderTest : public testing::Test {
TEST_F(EquationFinderTest, IdentifySpecialText) {
// Load Image.
string imagefile = file::JoinPath(testdata_dir_, "equ_gt1.tif");
Pix *pix_binary = pixRead(imagefile.c_str());
CHECK(pix_binary != NULL && pixGetDepth(pix_binary) == 1);
Pix* pix_binary = pixRead(imagefile.c_str());
CHECK(pix_binary != nullptr && pixGetDepth(pix_binary) == 1);
// Get components.
BLOCK_LIST blocks;
@ -251,24 +243,24 @@ TEST_F(EquationFinderTest, IsIndented) {
//
// part 5: ********
TBOX box1(0, 950, 999, 999);
ColPartition* part1 = ColPartition::FakePartition(
box1, PT_FLOWING_TEXT, BRT_TEXT, BTFT_NONE);
ColPartition* part1 =
ColPartition::FakePartition(box1, PT_FLOWING_TEXT, BRT_TEXT, BTFT_NONE);
part_grid.InsertBBox(true, true, part1);
TBOX box2(300, 920, 900, 940);
ColPartition* part2 = ColPartition::FakePartition(
box2, PT_FLOWING_TEXT, BRT_TEXT, BTFT_NONE);
ColPartition* part2 =
ColPartition::FakePartition(box2, PT_FLOWING_TEXT, BRT_TEXT, BTFT_NONE);
part_grid.InsertBBox(true, true, part2);
TBOX box3(0, 900, 600, 910);
ColPartition* part3 = ColPartition::FakePartition(
box3, PT_FLOWING_TEXT, BRT_TEXT, BTFT_NONE);
ColPartition* part3 =
ColPartition::FakePartition(box3, PT_FLOWING_TEXT, BRT_TEXT, BTFT_NONE);
part_grid.InsertBBox(true, true, part3);
TBOX box4(300, 890, 600, 899);
ColPartition* part4 = ColPartition::FakePartition(
box4, PT_FLOWING_TEXT, BRT_TEXT, BTFT_NONE);
ColPartition* part4 =
ColPartition::FakePartition(box4, PT_FLOWING_TEXT, BRT_TEXT, BTFT_NONE);
part_grid.InsertBBox(true, true, part4);
TBOX box5(300, 500, 900, 510);
ColPartition* part5 = ColPartition::FakePartition(
box5, PT_FLOWING_TEXT, BRT_TEXT, BTFT_NONE);
ColPartition* part5 =
ColPartition::FakePartition(box5, PT_FLOWING_TEXT, BRT_TEXT, BTFT_NONE);
part_grid.InsertBBox(true, true, part5);
// Test
@ -290,15 +282,15 @@ TEST_F(EquationFinderTest, IsIndented) {
// Release memory.
part1->DeleteBoxes();
delete(part1);
delete (part1);
part2->DeleteBoxes();
delete(part2);
delete (part2);
part3->DeleteBoxes();
delete(part3);
delete (part3);
part4->DeleteBoxes();
delete(part4);
delete (part4);
part5->DeleteBoxes();
delete(part5);
delete (part5);
}
TEST_F(EquationFinderTest, IsNearSmallNeighbor) {
@ -332,14 +324,14 @@ TEST_F(EquationFinderTest, IsNearSmallNeighbor) {
TEST_F(EquationFinderTest, CheckSeedBlobsCount) {
TBOX box(0, 950, 999, 999);
ColPartition* part1 = ColPartition::FakePartition(
box, PT_FLOWING_TEXT, BRT_TEXT, BTFT_NONE);
ColPartition* part2= ColPartition::FakePartition(
box, PT_FLOWING_TEXT, BRT_TEXT, BTFT_NONE);
ColPartition* part3 = ColPartition::FakePartition(
box, PT_FLOWING_TEXT, BRT_TEXT, BTFT_NONE);
ColPartition* part4 = ColPartition::FakePartition(
box, PT_FLOWING_TEXT, BRT_TEXT, BTFT_NONE);
ColPartition* part1 =
ColPartition::FakePartition(box, PT_FLOWING_TEXT, BRT_TEXT, BTFT_NONE);
ColPartition* part2 =
ColPartition::FakePartition(box, PT_FLOWING_TEXT, BRT_TEXT, BTFT_NONE);
ColPartition* part3 =
ColPartition::FakePartition(box, PT_FLOWING_TEXT, BRT_TEXT, BTFT_NONE);
ColPartition* part4 =
ColPartition::FakePartition(box, PT_FLOWING_TEXT, BRT_TEXT, BTFT_NONE);
// Part 1: 8 math, 0 digit, 20 total.
equation_det_->AddMathDigitBlobs(8, 0, 20, part1);
@ -359,20 +351,20 @@ TEST_F(EquationFinderTest, CheckSeedBlobsCount) {
// Release memory.
part1->DeleteBoxes();
delete(part1);
delete (part1);
part2->DeleteBoxes();
delete(part2);
delete (part2);
part3->DeleteBoxes();
delete(part3);
delete (part3);
part4->DeleteBoxes();
delete(part4);
delete (part4);
}
TEST_F(EquationFinderTest, ComputeForegroundDensity) {
// Create the pix with top half foreground, bottom half background.
int width = 1024, height = 768;
Pix *pix = pixCreate(width, height, 1);
pixRasterop(pix, 0, 0, width, height / 2, PIX_SET, NULL, 0, 0);
Pix* pix = pixCreate(width, height, 1);
pixRasterop(pix, 0, 0, width, height / 2, PIX_SET, nullptr, 0, 0);
TBOX box1(100, 0, 140, 140), box2(100, height / 2 - 20, 140, height / 2 + 20),
box3(100, height - 40, 140, height);
equation_det_->SetPixBinary(pix);
@ -414,20 +406,20 @@ TEST_F(EquationFinderTest, ComputeCPsSuperBBox) {
ColPartitionGrid part_grid(10, ICOORD(0, 0), ICOORD(1000, 1000));
TBOX box1(0, 0, 999, 99);
ColPartition* part1 = ColPartition::FakePartition(
box1, PT_FLOWING_TEXT, BRT_TEXT, BTFT_NONE);
ColPartition* part1 =
ColPartition::FakePartition(box1, PT_FLOWING_TEXT, BRT_TEXT, BTFT_NONE);
TBOX box2(0, 100, 499, 199);
ColPartition* part2 = ColPartition::FakePartition(
box2, PT_FLOWING_TEXT, BRT_TEXT, BTFT_NONE);
ColPartition* part2 =
ColPartition::FakePartition(box2, PT_FLOWING_TEXT, BRT_TEXT, BTFT_NONE);
TBOX box3(500, 100, 999, 199);
ColPartition* part3 = ColPartition::FakePartition(
box3, PT_FLOWING_TEXT, BRT_TEXT, BTFT_NONE);
ColPartition* part3 =
ColPartition::FakePartition(box3, PT_FLOWING_TEXT, BRT_TEXT, BTFT_NONE);
TBOX box4(0, 200, 999, 299);
ColPartition* part4 = ColPartition::FakePartition(
box4, PT_FLOWING_TEXT, BRT_TEXT, BTFT_NONE);
ColPartition* part4 =
ColPartition::FakePartition(box4, PT_FLOWING_TEXT, BRT_TEXT, BTFT_NONE);
TBOX box5(0, 900, 999, 999);
ColPartition* part5 = ColPartition::FakePartition(
box5, PT_FLOWING_TEXT, BRT_TEXT, BTFT_NONE);
ColPartition* part5 =
ColPartition::FakePartition(box5, PT_FLOWING_TEXT, BRT_TEXT, BTFT_NONE);
// Add part1->part3 into part_grid and test.
part_grid.InsertBBox(true, true, part1);
@ -448,21 +440,21 @@ TEST_F(EquationFinderTest, ComputeCPsSuperBBox) {
// Release memory.
part1->DeleteBoxes();
delete(part1);
delete (part1);
part2->DeleteBoxes();
delete(part2);
delete (part2);
part3->DeleteBoxes();
delete(part3);
delete (part3);
part4->DeleteBoxes();
delete(part4);
delete (part4);
part5->DeleteBoxes();
delete(part5);
delete (part5);
}
TEST_F(EquationFinderTest, SplitCPHorLite) {
TBOX box(0, 0, 999, 99);
ColPartition* part = ColPartition::FakePartition(
box, PT_FLOWING_TEXT, BRT_TEXT, BTFT_NONE);
ColPartition* part =
ColPartition::FakePartition(box, PT_FLOWING_TEXT, BRT_TEXT, BTFT_NONE);
part->DeleteBoxes();
part->set_median_width(10);
GenericVector<TBOX> splitted_boxes;
@ -491,13 +483,13 @@ TEST_F(EquationFinderTest, SplitCPHorLite) {
EXPECT_TRUE(TBOX(500, 0, 540, 35) == splitted_boxes[2]);
part->DeleteBoxes();
delete(part);
delete (part);
}
TEST_F(EquationFinderTest, SplitCPHor) {
TBOX box(0, 0, 999, 99);
ColPartition* part = ColPartition::FakePartition(
box, PT_FLOWING_TEXT, BRT_TEXT, BTFT_NONE);
ColPartition* part =
ColPartition::FakePartition(box, PT_FLOWING_TEXT, BRT_TEXT, BTFT_NONE);
part->DeleteBoxes();
part->set_median_width(10);
GenericVector<ColPartition*> parts_splitted;
@ -528,7 +520,7 @@ TEST_F(EquationFinderTest, SplitCPHor) {
parts_splitted.delete_data_pointers();
part->DeleteBoxes();
delete(part);
delete (part);
}
} // namespace tesseract
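One pattern worth noting throughout this file is the lifecycle of the fake partitions: every ColPartition built with FakePartition is released with DeleteBoxes() followed by delete, because the grid does not own the fake blobs. A minimal sketch of that pattern, using only calls that appear above (header paths assumed):

#include "colpartition.h"      // tesseract::ColPartition (textord/, path assumed)
#include "colpartitiongrid.h"  // tesseract::ColPartitionGrid

namespace tesseract {

void FakePartitionLifecycleSketch() {
  // A 1000x1000 grid with 10-pixel cells, as used by the tests above.
  ColPartitionGrid part_grid(10, ICOORD(0, 0), ICOORD(1000, 1000));
  TBOX box(0, 0, 999, 99);
  ColPartition* part =
      ColPartition::FakePartition(box, PT_FLOWING_TEXT, BRT_TEXT, BTFT_NONE);
  part_grid.InsertBBox(true, true, part);
  // Cleanup mirrors the tests: drop the fake blobs first, then the partition.
  part->DeleteBoxes();
  delete part;
}

}  // namespace tesseract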


@ -18,8 +18,7 @@ TEST(FileTest, JoinPath) {
TEST(OutputBufferTest, WriteString) {
const int kMaxBufSize = 128;
char buffer[kMaxBufSize];
for (int i = 0; i < kMaxBufSize; ++i)
buffer[i] = '\0';
for (int i = 0; i < kMaxBufSize; ++i) buffer[i] = '\0';
FILE* fp = fmemopen(buffer, kMaxBufSize, "w");
CHECK(fp != nullptr);


@ -21,12 +21,12 @@
namespace tesseract {
int test_data[] = { 8, 1, 2, -4, 7, 9, 65536, 4, 9, 0};
int test_data[] = {8, 1, 2, -4, 7, 9, 65536, 4, 9, 0};
// The fixture for testing GenericHeap and DoublePtr.
class HeapTest : public testing::Test {
public:
virtual ~HeapTest();
virtual ~HeapTest();
// Pushes the test data onto both the heap and the KDVector.
void PushTestData(GenericHeap<IntKDPair>* heap, KDVector* v) {
for (int i = 0; i < ARRAYSIZE(test_data); ++i) {
@ -51,7 +51,7 @@ class HeapTest : public testing::Test {
// Indices don't necessarily match for equal keys, so don't test them.
if (i + 1 < v->size() && (*v)[i + 1].key == (*v)[i].key) {
while (i + 1 < v->size() && (*v)[i + 1].key == (*v)[i].key) {
heap->Pop(NULL);
heap->Pop(nullptr);
++i;
EXPECT_FALSE(heap->empty());
EXPECT_EQ((*v)[i].key, heap->PeekTop().key);
@ -61,7 +61,7 @@ class HeapTest : public testing::Test {
EXPECT_EQ((*v)[i].data, heap->PeekTop().data);
}
EXPECT_FALSE(heap->empty());
EXPECT_TRUE(heap->Pop(NULL));
EXPECT_TRUE(heap->Pop(nullptr));
}
EXPECT_TRUE(heap->empty());
}
@ -95,7 +95,7 @@ TEST_F(HeapTest, MixedTest) {
// Sort the vector and remove the first 5 values from both heap and v.
v.sort();
for (int i = 0; i < 5; ++i) {
heap.Pop(NULL);
heap.Pop(nullptr);
v.remove(0);
}
// Push the test data onto both the heap and the KDVector.
@ -162,7 +162,7 @@ TEST_F(HeapTest, RevalueTest) {
for (int i = 0; i < v.size(); ++i) {
EXPECT_EQ(v[i].key, heap.PeekTop().key);
EXPECT_FALSE(heap.empty());
heap.Pop(NULL);
heap.Pop(nullptr);
}
EXPECT_TRUE(heap.empty());
}
@ -174,7 +174,7 @@ TEST_F(HeapTest, RevalueTest) {
static void ConstRefTest(const DoublePtr& ptr1) {
DoublePtr ptr2(ptr1); // Compiler error here.
EXPECT_EQ(&ptr2, ptr2.OtherEnd()->OtherEnd());
EXPECT_TRUE(ptr1.OtherEnd() == NULL);
EXPECT_TRUE(ptr1.OtherEnd() == nullptr);
}
#endif
@ -186,11 +186,11 @@ TEST_F(HeapTest, DoublePtrTest) {
// Check that the correct copy constructor is used.
DoublePtr ptr3(ptr1);
EXPECT_EQ(&ptr3, ptr3.OtherEnd()->OtherEnd());
EXPECT_TRUE(ptr1.OtherEnd() == NULL);
EXPECT_TRUE(ptr1.OtherEnd() == nullptr);
// Check that the correct operator= is used.
ptr1 = ptr3;
EXPECT_EQ(&ptr1, ptr1.OtherEnd()->OtherEnd());
EXPECT_TRUE(ptr3.OtherEnd() == NULL);
EXPECT_TRUE(ptr3.OtherEnd() == nullptr);
}
} // namespace tesseract


@ -12,8 +12,7 @@ namespace {
class ImagedataTest : public ::testing::Test {
protected:
ImagedataTest() {
}
ImagedataTest() {}
// Creates a fake DocumentData, writes it to a file, and returns the filename.
string MakeFakeDoc(int num_pages, int doc_id,
@ -31,14 +30,14 @@ class ImagedataTest : public ::testing::Test {
// Make an imagedata and put it in the document.
ImageData* imagedata =
ImageData::Build("noname", p, "eng", fake_image.data(),
fake_image.size(), (*page_texts)[p].c_str(), NULL);
fake_image.size(), (*page_texts)[p].c_str(), nullptr);
EXPECT_EQ(kImageSize, imagedata->MemoryUsed());
write_doc.AddPageToDocument(imagedata);
}
// Write it to a file.
string filename = file::JoinPath(
FLAGS_test_tmpdir, absl::StrCat("documentdata", doc_id, ".lstmf"));
EXPECT_TRUE(write_doc.SaveDocument(filename.c_str(), NULL));
EXPECT_TRUE(write_doc.SaveDocument(filename.c_str(), nullptr));
return filename;
}
};
@ -51,7 +50,7 @@ TEST_F(ImagedataTest, CachesProperly) {
// Allowances to read the document. Big enough for 1, 3, 0, all pages.
const int kMemoryAllowances[] = {2000000, 4000000, 1000000, 100000000, 0};
// Order in which to read the pages, with some sequential and some seeks.
const int kPageReadOrder[] = { 0, 1, 2, 3, 8, 4, 5, 6, 7, 11, 10, 9, -1 };
const int kPageReadOrder[] = {0, 1, 2, 3, 8, 4, 5, 6, 7, 11, 10, 9, -1};
std::vector<string> page_texts;
string filename = MakeFakeDoc(kNumPages, 0, &page_texts);
@ -60,13 +59,13 @@ TEST_F(ImagedataTest, CachesProperly) {
for (int m = 0; kMemoryAllowances[m] > 0; ++m) {
DocumentData read_doc("My document");
EXPECT_TRUE(
read_doc.LoadDocument(filename.c_str(), 0, kMemoryAllowances[m], NULL));
read_doc.LoadDocument(filename.c_str(), 0, kMemoryAllowances[m], nullptr));
LOG(ERROR) << "Allowance = " << kMemoryAllowances[m];
// Read the pages in a specific order.
for (int p = 0; kPageReadOrder[p] >= 0; ++p) {
int page = kPageReadOrder[p];
const ImageData* imagedata = read_doc.GetPage(page);
EXPECT_NE(reinterpret_cast<const ImageData*>(NULL), imagedata);
EXPECT_NE(reinterpret_cast<const ImageData*>(nullptr), imagedata);
// Check that this is the right page.
EXPECT_STREQ(page_texts[page].c_str(),
imagedata->transcription().string());
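The caching behaviour being verified is easier to see in isolation: pages are appended to a DocumentData, serialized to an .lstmf file, and later reloaded under a memory allowance that forces pages to be re-fetched from disk. A stripped-down sketch reusing only the calls shown above (image bytes, text and paths are placeholders):

#include <string>
#include "imagedata.h"  // tesseract::ImageData, tesseract::DocumentData (path assumed)

namespace tesseract {

void DocumentCacheSketch() {
  DocumentData write_doc("My document");
  std::string fake_image(1000, 'x');  // placeholder "image" bytes
  ImageData* page0 =
      ImageData::Build("noname", 0, "eng", fake_image.data(), fake_image.size(),
                       "page 0 text", nullptr);
  write_doc.AddPageToDocument(page0);
  write_doc.SaveDocument("/tmp/documentdata0.lstmf", nullptr);

  // Reload with a 2MB allowance; GetPage re-reads pages from disk as needed.
  DocumentData read_doc("My document");
  read_doc.LoadDocument("/tmp/documentdata0.lstmf", 0, 2000000, nullptr);
  const ImageData* page = read_doc.GetPage(0);
  (void)page;  // page->transcription() should hold "page 0 text".
}

}  // namespace tesseract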


@ -13,14 +13,13 @@
#ifndef TESSERACT_UNITTEST_INCLUDE_GUNIT_H_
#define TESSERACT_UNITTEST_INCLUDE_GUNIT_H_
#include "errcode.h" // for ASSERT_HOST
#include "fileio.h" // for tesseract::File
#include "gtest/gtest.h"
#include "errcode.h" // for ASSERT_HOST
#include "fileio.h" // for tesseract::File
const char* FLAGS_test_tmpdir = ".";
class file: public tesseract::File {
};
class file : public tesseract::File {};
#define ABSL_ARRAYSIZE(arr) (sizeof(arr) / sizeof(arr[0]))
#define ARRAYSIZE(arr) (sizeof(arr) / sizeof(arr[0]))
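Both macros compute the element count of a genuine array at compile time (they do not work on pointers). A small usage example:

// Assumes the ARRAYSIZE macro defined above is in scope.
static const int kSamples[] = {3, 1, 4, 1, 5, 9};

int SumSamples() {
  int sum = 0;
  for (int i = 0; i < ARRAYSIZE(kSamples); ++i) sum += kSamples[i];
  return sum;  // 3 + 1 + 4 + 1 + 5 + 9 = 23
}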


@ -34,13 +34,11 @@ class IndexMapBiDiTest : public testing::Test {
map->Init(kPrimeLimit + 1, false);
map->SetMap(2, true);
// Set all the odds to true.
for (int i = 3; i <= kPrimeLimit; i += 2)
map->SetMap(i, true);
for (int i = 3; i <= kPrimeLimit; i += 2) map->SetMap(i, true);
int factor_limit = static_cast<int>(sqrt(1.0 + kPrimeLimit));
for (int f = 3; f <= factor_limit; f += 2) {
if (map->SparseToCompact(f) >= 0) {
for (int m = 2; m * f <= kPrimeLimit; ++m)
map->SetMap(f * m, false);
for (int m = 2; m * f <= kPrimeLimit; ++m) map->SetMap(f * m, false);
}
}
map->Setup();
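What this fixture builds is a sieve of Eratosthenes expressed through the boolean sparse map: 2 and all odd numbers start switched on, then multiples of every surviving odd factor are switched off before Setup() compacts the map to the primes. The same sieve in plain C++ for reference (this shows only the sieve logic, not the IndexMapBiDi API):

#include <cmath>
#include <vector>

// is_prime[i] ends up true exactly for the primes i <= limit, mirroring the
// SetMap(true/false) calls used to populate the IndexMapBiDi above.
std::vector<bool> BuildPrimeMap(int limit) {
  std::vector<bool> is_prime(limit + 1, false);
  if (limit >= 2) is_prime[2] = true;
  for (int i = 3; i <= limit; i += 2) is_prime[i] = true;  // all odds on
  const int factor_limit = static_cast<int>(std::sqrt(1.0 + limit));
  for (int f = 3; f <= factor_limit; f += 2) {
    if (is_prime[f]) {
      for (int m = 2; m * f <= limit; ++m) is_prime[f * m] = false;
    }
  }
  return is_prime;
}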


@ -74,8 +74,8 @@ TEST_F(IntFeatureMapTest, Exhaustive) {
int dtheta = kIntFeatureExtent / kThetaBuckets + 1;
int bad_offsets = 0;
for (int index = 0; index < total_buckets; ++index) {
for (int dir = -tesseract::kNumOffsetMaps;
dir <= tesseract::kNumOffsetMaps; ++dir) {
for (int dir = -tesseract::kNumOffsetMaps; dir <= tesseract::kNumOffsetMaps;
++dir) {
int offset_index = map.OffsetFeature(index, dir);
if (dir == 0) {
EXPECT_EQ(index, offset_index);
@ -101,7 +101,7 @@ TEST_F(IntFeatureMapTest, Exhaustive) {
// test again.
map.DeleteMapFeature(0);
map.DeleteMapFeature(total_buckets - 1);
map.FinalizeMapping(NULL);
map.FinalizeMapping(nullptr);
map.IndexAndSortFeatures(features.get(), total_size, &index_features);
// Has no effect on index features.
EXPECT_EQ(total_size, index_features.size());


@ -7,8 +7,7 @@ namespace tesseract {
namespace {
string TestDataNameToPath(const string& name) {
return file::JoinPath(FLAGS_test_srcdir,
"testdata", name);
return file::JoinPath(FLAGS_test_srcdir, "testdata", name);
}
// This is an integration test that verifies that CombineLangModel works to


@ -13,35 +13,27 @@
namespace {
using tesseract::MutableIterator;
using tesseract::ResultIterator;
using tesseract::PageIteratorLevel;
using tesseract::ResultIterator;
const char* kStrings8087_054[] = {
"dat", "Dalmatian", "", "DAMAGED DURING", "margarine,", NULL
};
const PolyBlockType kBlocks8087_054[] = {
PT_HEADING_TEXT, PT_FLOWING_TEXT, PT_PULLOUT_IMAGE,
PT_CAPTION_TEXT, PT_FLOWING_TEXT
};
"dat", "Dalmatian", "", "DAMAGED DURING", "margarine,", nullptr};
const PolyBlockType kBlocks8087_054[] = {PT_HEADING_TEXT, PT_FLOWING_TEXT,
PT_PULLOUT_IMAGE, PT_CAPTION_TEXT,
PT_FLOWING_TEXT};
// The fixture for testing Tesseract.
class LayoutTest : public testing::Test {
protected:
string TestDataNameToPath(const string& name) {
return file::JoinPath(FLAGS_test_srcdir,
"testdata/" + name);
return file::JoinPath(FLAGS_test_srcdir, "testdata/" + name);
}
string TessdataPath() {
return file::JoinPath(FLAGS_test_srcdir,
"tessdata");
return file::JoinPath(FLAGS_test_srcdir, "tessdata");
}
LayoutTest() {
src_pix_ = NULL;
}
~LayoutTest() {
pixDestroy(&src_pix_);
}
LayoutTest() { src_pix_ = nullptr; }
~LayoutTest() { pixDestroy(&src_pix_); }
void SetImage(const char* filename, const char* lang) {
pixDestroy(&src_pix_);
@ -54,41 +46,38 @@ class LayoutTest : public testing::Test {
// Tests reading order and block finding (very roughly) by iterating
// over the blocks, expecting that they contain the strings in order,
// allowing for other blocks in between.
// An empty string should match an image block, and a NULL string
// An empty string should match an image block, and a nullptr string
// indicates the end of the array.
void VerifyBlockTextOrder(const char* strings[],
const PolyBlockType* blocks,
void VerifyBlockTextOrder(const char* strings[], const PolyBlockType* blocks,
ResultIterator* it) {
it->Begin();
int string_index = 0;
int block_index = 0;
do {
char* block_text = it->GetUTF8Text(tesseract::RIL_BLOCK);
if (block_text != NULL &&
it->BlockType() == blocks[string_index] &&
strstr(block_text, strings[string_index]) != NULL) {
if (block_text != nullptr && it->BlockType() == blocks[string_index] &&
strstr(block_text, strings[string_index]) != nullptr) {
VLOG(1) << StringPrintf("Found string %s in block %d of type %s",
strings[string_index], block_index,
kPolyBlockNames[blocks[string_index]]);
// Found this one.
++string_index;
} else if (it->BlockType() == blocks[string_index] &&
block_text == NULL && strings[string_index][0] == '\0') {
VLOG(1) << StringPrintf("Found block of type %s at block %d",
kPolyBlockNames[blocks[string_index]],
block_index);
// Found this one.
++string_index;
block_text == nullptr && strings[string_index][0] == '\0') {
VLOG(1) << StringPrintf("Found block of type %s at block %d",
kPolyBlockNames[blocks[string_index]],
block_index);
// Found this one.
++string_index;
} else {
VLOG(1) << StringPrintf("No match found in block with text:\n%s",
block_text);
}
delete [] block_text;
delete[] block_text;
++block_index;
if (strings[string_index] == NULL)
break;
if (strings[string_index] == nullptr) break;
} while (it->Next(tesseract::RIL_BLOCK));
EXPECT_TRUE(strings[string_index] == NULL);
EXPECT_TRUE(strings[string_index] == nullptr);
}
// Tests that approximate order of the biggest text blocks is correct.
@ -105,8 +94,8 @@ class LayoutTest : public testing::Test {
do {
int left, top, right, bottom;
if (it->BoundingBox(tesseract::RIL_BLOCK, &left, &top, &right, &bottom) &&
PTIsTextType(it->BlockType()) &&
right - left > 800 && bottom - top > 200) {
PTIsTextType(it->BlockType()) && right - left > 800 &&
bottom - top > 200) {
if (prev_right > prev_left) {
if (min(right, prev_right) > max(left, prev_left)) {
EXPECT_GE(top, prev_bottom) << "Overlapping block should be below";
@ -134,11 +123,11 @@ class LayoutTest : public testing::Test {
do {
int left, top, right, bottom;
if (it->BoundingBox(tesseract::RIL_BLOCK, &left, &top, &right, &bottom) &&
PTIsTextType(it->BlockType()) &&
right - left > 800 && bottom - top > 200 ) {
PTIsTextType(it->BlockType()) && right - left > 800 &&
bottom - top > 200) {
const PAGE_RES_IT* pr_it = it->PageResIt();
POLY_BLOCK* pb = pr_it->block()->block->poly_block();
CHECK(pb != NULL);
CHECK(pb != nullptr);
FCOORD skew = pr_it->block()->block->skew();
EXPECT_GT(skew.x(), 0.0f);
EXPECT_GT(skew.y(), 0.0f);
@ -148,11 +137,11 @@ class LayoutTest : public testing::Test {
const PAGE_RES_IT* w_it = word_it.PageResIt();
// Iterate the blobs in the word.
C_BLOB_IT b_it(w_it->word()->word->cblob_list());
for (b_it.mark_cycle_pt();!b_it.cycled_list(); b_it.forward()) {
for (b_it.mark_cycle_pt(); !b_it.cycled_list(); b_it.forward()) {
C_BLOB* blob = b_it.data();
// Iterate the outlines in the blob.
C_OUTLINE_IT ol_it(blob->out_list());
for (ol_it.mark_cycle_pt();!ol_it.cycled_list(); ol_it.forward()) {
for (ol_it.mark_cycle_pt(); !ol_it.cycled_list(); ol_it.forward()) {
C_OUTLINE* ol = ol_it.data();
TBOX box = ol->bounding_box();
ICOORD middle((box.left() + box.right()) / 2,
@ -176,7 +165,7 @@ class LayoutTest : public testing::Test {
TEST_F(LayoutTest, UNLV8087_054) {
SetImage("8087_054.3B.tif", "eng");
// Just run recognition.
EXPECT_EQ(api_.Recognize(NULL), 0);
EXPECT_EQ(api_.Recognize(nullptr), 0);
// Check iterator position.
tesseract::ResultIterator* it = api_.GetIterator();
VerifyBlockTextOrder(kStrings8087_054, kBlocks8087_054, it);
@ -188,7 +177,7 @@ TEST_F(LayoutTest, UNLV8087_054) {
TEST_F(LayoutTest, HebrewOrderingAndSkew) {
SetImage("GOOGLE:13510798882202548:74:84.sj-79.tif", "eng");
// Just run recognition.
EXPECT_EQ(api_.Recognize(NULL), 0);
EXPECT_EQ(api_.Recognize(nullptr), 0);
tesseract::MutableIterator* it = api_.GetMutableIterator();
// In eng mode, block order should not be RTL.
VerifyRoughBlockOrder(false, it);
@ -197,7 +186,7 @@ TEST_F(LayoutTest, HebrewOrderingAndSkew) {
// Now try again using Hebrew.
SetImage("GOOGLE:13510798882202548:74:84.sj-79.tif", "heb");
// Just run recognition.
EXPECT_EQ(api_.Recognize(NULL), 0);
EXPECT_EQ(api_.Recognize(nullptr), 0);
it = api_.GetMutableIterator();
// In heb mode, block order should be RTL.
VerifyRoughBlockOrder(true, it);
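All of the verification helpers in this fixture follow the same iteration pattern over recognition results. A condensed sketch of that pattern (image path and language are placeholders, error handling is omitted, and header paths are assumed):

#include "allheaders.h"      // Leptonica pixRead/pixDestroy
#include "baseapi.h"         // tesseract::TessBaseAPI
#include "resultiterator.h"  // tesseract::ResultIterator

void IterateBlocksSketch() {
  tesseract::TessBaseAPI api;
  api.Init(nullptr, "eng");               // nullptr = default tessdata location
  Pix* pix = pixRead("8087_054.3B.tif");  // placeholder input image
  api.SetImage(pix);
  api.Recognize(nullptr);
  tesseract::ResultIterator* it = api.GetIterator();
  if (it != nullptr) {
    it->Begin();
    do {
      char* block_text = it->GetUTF8Text(tesseract::RIL_BLOCK);
      // Inspect it->BlockType() and block_text here, as VerifyBlockTextOrder does.
      delete[] block_text;
    } while (it->Next(tesseract::RIL_BLOCK));
    delete it;  // the caller owns the iterator returned by GetIterator().
  }
  pixDestroy(&pix);
}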


@ -25,13 +25,10 @@ const char kRenderableEngLigatureText[] = "fidelity effigy ſteep";
class LigatureTableTest : public ::testing::Test {
protected:
static void SetUpTestCase() {
FLAGS_fonts_dir = File::JoinPath(
FLAGS_test_srcdir, "testdata");
FLAGS_fonts_dir = File::JoinPath(FLAGS_test_srcdir, "testdata");
FLAGS_fontconfig_tmpdir = FLAGS_test_tmpdir;
}
void SetUp() {
lig_table_ = LigatureTable::Get();
}
void SetUp() { lig_table_ = LigatureTable::Get(); }
LigatureTable* lig_table_;
};
@ -42,7 +39,7 @@ TEST_F(LigatureTableTest, DoesFillLigatureTables) {
TEST_F(LigatureTableTest, DoesAddLigatures) {
EXPECT_STREQ(kEngLigatureText,
lig_table_->AddLigatures(kEngNonLigatureText, NULL).c_str());
lig_table_->AddLigatures(kEngNonLigatureText, nullptr).c_str());
}
TEST_F(LigatureTableTest, DoesAddLigaturesWithSupportedFont) {
@ -66,15 +63,12 @@ TEST_F(LigatureTableTest, DoesRemoveLigatures) {
TEST_F(LigatureTableTest, TestCustomLigatures) {
const char* kTestCases[] = {
"act", "a\uE003",
"publiſh", "publi\uE006",
"ſince", "\uE007nce",
"aſleep", "a\uE008eep",
"neceſſary", "nece\uE009ary",
"act", "a\uE003", "publiſh", "publi\uE006", "ſince",
"\uE007nce", "aſleep", "a\uE008eep", "neceſſary", "nece\uE009ary",
};
for (int i = 0; i < ARRAYSIZE(kTestCases); i += 2) {
EXPECT_STREQ(kTestCases[i + 1],
lig_table_->AddLigatures(kTestCases[i], NULL).c_str());
lig_table_->AddLigatures(kTestCases[i], nullptr).c_str());
EXPECT_STREQ(kTestCases[i],
lig_table_->RemoveLigatures(kTestCases[i + 1]).c_str());
EXPECT_STREQ(kTestCases[i],
@ -84,13 +78,15 @@ TEST_F(LigatureTableTest, TestCustomLigatures) {
TEST_F(LigatureTableTest, TestRemovesCustomLigatures) {
const char* kTestCases[] = {
"fiction", "\uE003ion", "fiction",
"fiction",
"\uE003ion",
"fiction",
};
for (int i = 0; i < ARRAYSIZE(kTestCases); i += 3) {
EXPECT_STREQ(kTestCases[i + 1],
lig_table_->AddLigatures(kTestCases[i], NULL).c_str());
lig_table_->AddLigatures(kTestCases[i], nullptr).c_str());
EXPECT_STREQ(kTestCases[i + 2],
lig_table_->RemoveCustomLigatures(kTestCases[i + 1]).c_str());
}
}
}
} // namespace
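The API under test here is small: LigatureTable is a singleton that rewrites text into and out of its ligated form, with the second argument of AddLigatures naming an optional font restriction (nullptr means none, as in these tests). A short usage sketch based only on the calls above (header path assumed):

#include "ligature_table.h"  // tesseract::LigatureTable (training/, path assumed)

void LigatureSketch() {
  tesseract::LigatureTable* lig_table = tesseract::LigatureTable::Get();
  // "fi" and "ffi" sequences are replaced by their ligature code points.
  auto ligated = lig_table->AddLigatures("fidelity effigy", nullptr);
  // Converting back recovers the plain ASCII spelling.
  auto plain = lig_table->RemoveLigatures(ligated.c_str());
  (void)plain;
}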


@ -17,11 +17,9 @@ namespace {
class LLSQTest : public testing::Test {
public:
void SetUp() {
}
void SetUp() {}
void TearDown() {
}
void TearDown() {}
void ExpectCorrectLine(const LLSQ& llsq, double m, double c, double rms,
double pearson, double tolerance) {
@ -30,14 +28,14 @@ class LLSQTest : public testing::Test {
EXPECT_NEAR(rms, llsq.rms(llsq.m(), llsq.c(llsq.m())), tolerance);
EXPECT_NEAR(pearson, llsq.pearson(), tolerance);
}
FCOORD PtsMean(const std::vector<FCOORD> &pts) {
FCOORD PtsMean(const std::vector<FCOORD>& pts) {
FCOORD total(0, 0);
for (int i = 0; i < pts.size(); i++) {
total += pts[i];
}
return (pts.size() > 0) ? total / pts.size() : total;
}
void VerifyRmsOrth(const std::vector<FCOORD> &pts, const FCOORD &orth) {
void VerifyRmsOrth(const std::vector<FCOORD>& pts, const FCOORD& orth) {
LLSQ llsq;
FCOORD xavg = PtsMean(pts);
FCOORD nvec = !orth;
@ -79,13 +77,11 @@ TEST_F(LLSQTest, BasicLines) {
// The point at 1,2 pulls the result away from what would otherwise be a
// perfect fit to a horizontal line by 0.25 unit, with rms error of 0.433.
ExpectCorrectLine(llsq, 0.0, 1.25, 0.433, 0.0, 1e-2);
ExpectCorrectVector(llsq, FCOORD(1.0f, 1.25f),
FCOORD(1.0f, 0.0f), 1e-3);
ExpectCorrectVector(llsq, FCOORD(1.0f, 1.25f), FCOORD(1.0f, 0.0f), 1e-3);
llsq.add(1.0, 2.0, 10.0);
// With a heavy weight, the point at 1,2 pulls the line nearer.
ExpectCorrectLine(llsq, 0.0, 1.786, 0.41, 0.0, 1e-2);
ExpectCorrectVector(llsq, FCOORD(1.0f, 1.786f),
FCOORD(1.0f, 0.0f), 1e-3);
ExpectCorrectVector(llsq, FCOORD(1.0f, 1.786f), FCOORD(1.0f, 0.0f), 1e-3);
}
// Tests a simple baseline-style normalization with a rotation.
@ -93,14 +89,12 @@ TEST_F(LLSQTest, Vectors) {
LLSQ llsq;
llsq.add(1.0, 1.0);
llsq.add(1.0, -1.0);
ExpectCorrectVector(llsq, FCOORD(1.0f, 0.0f),
FCOORD(0.0f, 1.0f), 1e-6);
ExpectCorrectVector(llsq, FCOORD(1.0f, 0.0f), FCOORD(0.0f, 1.0f), 1e-6);
llsq.add(0.9, -2.0);
llsq.add(1.1, -3.0);
llsq.add(0.9, 2.0);
llsq.add(1.10001, 3.0);
ExpectCorrectVector(llsq, FCOORD(1.0f, 0.0f),
FCOORD(0.0f, 1.0f), 1e-3);
ExpectCorrectVector(llsq, FCOORD(1.0f, 0.0f), FCOORD(0.0f, 1.0f), 1e-3);
}
// Verify that rms_orth() actually calculates:
@ -112,10 +106,10 @@ TEST_F(LLSQTest, RmsOrthWorksAsIntended) {
pts.push_back(FCOORD(0.13, 0.77));
pts.push_back(FCOORD(0.16, 0.83));
pts.push_back(FCOORD(0.45, 0.79));
VerifyRmsOrth(pts, FCOORD(1,0));
VerifyRmsOrth(pts, FCOORD(1,1));
VerifyRmsOrth(pts, FCOORD(1,2));
VerifyRmsOrth(pts, FCOORD(2,1));
VerifyRmsOrth(pts, FCOORD(1, 0));
VerifyRmsOrth(pts, FCOORD(1, 1));
VerifyRmsOrth(pts, FCOORD(1, 2));
VerifyRmsOrth(pts, FCOORD(2, 1));
}
} // namespace.
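The LLSQ interface these tests exercise is: add points (optionally with a weight), then read the fit back as gradient m(), intercept c(m), the rms error for that line, and the Pearson correlation. A short sketch under those assumptions (header name taken from the ccstruct directory):

#include <cstdio>
#include "linlsq.h"  // LLSQ (ccstruct/, path assumed)

void FitLineSketch() {
  LLSQ llsq;
  llsq.add(1.0, 1.0);       // unit-weight points
  llsq.add(2.0, 2.1);
  llsq.add(3.0, 2.9);
  llsq.add(4.0, 4.0, 2.0);  // a weighted point, as in the BasicLines test
  const double m = llsq.m();   // gradient of the least-squares fit
  const double c = llsq.c(m);  // intercept for that gradient
  std::printf("y = %gx + %g, rms = %g, pearson = %g\n", m, c, llsq.rms(m, c),
              llsq.pearson());
}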


@ -2,8 +2,8 @@
// File: loadlang_test.cc
// Description: Test loading of All languages and Scripts for Tesseract.
// Tests for All languages and scripts are Disabled by default.
// Force the disabled test to run if required by using the --gtest_also_run_disabled_tests argument.
// Author: Shree Devi Kumar
// Force the disabled test to run if required by using the
// --gtest_also_run_disabled_tests argument. Author: Shree Devi Kumar
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@ -16,230 +16,235 @@
// limitations under the License.
///////////////////////////////////////////////////////////////////////
#include "include_gunit.h"
#include "baseapi.h"
#include <time.h>
#include "baseapi.h"
#include "include_gunit.h"
namespace {
class QuickTest : public testing::Test {
protected:
virtual void SetUp() {
start_time_ = time(nullptr);
}
virtual void SetUp() { start_time_ = time(nullptr); }
virtual void TearDown() {
const time_t end_time = time(nullptr);
EXPECT_TRUE(end_time - start_time_ <=25) << "The test took too long - " << ::testing::PrintToString(end_time - start_time_);
EXPECT_TRUE(end_time - start_time_ <= 25)
<< "The test took too long - "
<< ::testing::PrintToString(end_time - start_time_);
}
time_t start_time_;
};
};
void LangLoader(const char* lang, const char* tessdatadir) {
tesseract::TessBaseAPI *api = new tesseract::TessBaseAPI();
ASSERT_FALSE(api->Init(tessdatadir, lang)) << "Could not initialize tesseract for $lang.";
api->End();
}
void LangLoader(const char* lang, const char* tessdatadir) {
tesseract::TessBaseAPI* api = new tesseract::TessBaseAPI();
ASSERT_FALSE(api->Init(tessdatadir, lang))
<< "Could not initialize tesseract for $lang.";
api->End();
}
// For all languages
class LoadLanguage : public QuickTest ,
public ::testing::WithParamInterface<const char*> {
};
class LoadLanguage : public QuickTest,
public ::testing::WithParamInterface<const char*> {};
TEST_P(LoadLanguage, afr) {LangLoader("afr" , GetParam());}
TEST_P(LoadLanguage, amh) {LangLoader("amh" , GetParam());}
TEST_P(LoadLanguage, ara) {LangLoader("ara" , GetParam());}
TEST_P(LoadLanguage, asm) {LangLoader("asm" , GetParam());}
TEST_P(LoadLanguage, aze) {LangLoader("aze" , GetParam());}
TEST_P(LoadLanguage, aze_cyrl) {LangLoader("aze_cyrl" , GetParam());}
TEST_P(LoadLanguage, bel) {LangLoader("bel" , GetParam());}
TEST_P(LoadLanguage, ben) {LangLoader("ben" , GetParam());}
TEST_P(LoadLanguage, bod) {LangLoader("bod" , GetParam());}
TEST_P(LoadLanguage, bos) {LangLoader("bos" , GetParam());}
TEST_P(LoadLanguage, bre) {LangLoader("bre" , GetParam());}
TEST_P(LoadLanguage, bul) {LangLoader("bul" , GetParam());}
TEST_P(LoadLanguage, cat) {LangLoader("cat" , GetParam());}
TEST_P(LoadLanguage, ceb) {LangLoader("ceb" , GetParam());}
TEST_P(LoadLanguage, ces) {LangLoader("ces" , GetParam());}
TEST_P(LoadLanguage, chi_sim) {LangLoader("chi_sim" , GetParam());}
TEST_P(LoadLanguage, chi_sim_vert) {LangLoader("chi_sim_vert" , GetParam());}
TEST_P(LoadLanguage, chi_tra) {LangLoader("chi_tra" , GetParam());}
TEST_P(LoadLanguage, chi_tra_vert) {LangLoader("chi_tra_vert" , GetParam());}
TEST_P(LoadLanguage, chr) {LangLoader("chr" , GetParam());}
TEST_P(LoadLanguage, cos) {LangLoader("cos" , GetParam());}
TEST_P(LoadLanguage, cym) {LangLoader("cym" , GetParam());}
TEST_P(LoadLanguage, dan) {LangLoader("dan" , GetParam());}
TEST_P(LoadLanguage, deu) {LangLoader("deu" , GetParam());}
TEST_P(LoadLanguage, div) {LangLoader("div" , GetParam());}
TEST_P(LoadLanguage, dzo) {LangLoader("dzo" , GetParam());}
TEST_P(LoadLanguage, ell) {LangLoader("ell" , GetParam());}
TEST_P(LoadLanguage, eng) {LangLoader("eng" , GetParam());}
TEST_P(LoadLanguage, enm) {LangLoader("enm" , GetParam());}
TEST_P(LoadLanguage, epo) {LangLoader("epo" , GetParam());}
TEST_P(LoadLanguage, est) {LangLoader("est" , GetParam());}
TEST_P(LoadLanguage, eus) {LangLoader("eus" , GetParam());}
TEST_P(LoadLanguage, fao) {LangLoader("fao" , GetParam());}
TEST_P(LoadLanguage, fas) {LangLoader("fas" , GetParam());}
TEST_P(LoadLanguage, fil) {LangLoader("fil" , GetParam());}
TEST_P(LoadLanguage, fin) {LangLoader("fin" , GetParam());}
TEST_P(LoadLanguage, fra) {LangLoader("fra" , GetParam());}
TEST_P(LoadLanguage, frk) {LangLoader("frk" , GetParam());}
TEST_P(LoadLanguage, frm) {LangLoader("frm" , GetParam());}
TEST_P(LoadLanguage, fry) {LangLoader("fry" , GetParam());}
TEST_P(LoadLanguage, gla) {LangLoader("gla" , GetParam());}
TEST_P(LoadLanguage, gle) {LangLoader("gle" , GetParam());}
TEST_P(LoadLanguage, glg) {LangLoader("glg" , GetParam());}
TEST_P(LoadLanguage, grc) {LangLoader("grc" , GetParam());}
TEST_P(LoadLanguage, guj) {LangLoader("guj" , GetParam());}
TEST_P(LoadLanguage, hat) {LangLoader("hat" , GetParam());}
TEST_P(LoadLanguage, heb) {LangLoader("heb" , GetParam());}
TEST_P(LoadLanguage, hin) {LangLoader("hin" , GetParam());}
TEST_P(LoadLanguage, hrv) {LangLoader("hrv" , GetParam());}
TEST_P(LoadLanguage, hun) {LangLoader("hun" , GetParam());}
TEST_P(LoadLanguage, hye) {LangLoader("hye" , GetParam());}
TEST_P(LoadLanguage, iku) {LangLoader("iku" , GetParam());}
TEST_P(LoadLanguage, ind) {LangLoader("ind" , GetParam());}
TEST_P(LoadLanguage, isl) {LangLoader("isl" , GetParam());}
TEST_P(LoadLanguage, ita) {LangLoader("ita" , GetParam());}
TEST_P(LoadLanguage, ita_old) {LangLoader("ita_old" , GetParam());}
TEST_P(LoadLanguage, jav) {LangLoader("jav" , GetParam());}
TEST_P(LoadLanguage, jpn) {LangLoader("jpn" , GetParam());}
TEST_P(LoadLanguage, jpn_vert) {LangLoader("jpn_vert" , GetParam());}
TEST_P(LoadLanguage, kan) {LangLoader("kan" , GetParam());}
TEST_P(LoadLanguage, kat) {LangLoader("kat" , GetParam());}
TEST_P(LoadLanguage, kat_old) {LangLoader("kat_old" , GetParam());}
TEST_P(LoadLanguage, kaz) {LangLoader("kaz" , GetParam());}
TEST_P(LoadLanguage, khm) {LangLoader("khm" , GetParam());}
TEST_P(LoadLanguage, kir) {LangLoader("kir" , GetParam());}
TEST_P(LoadLanguage, afr) { LangLoader("afr", GetParam()); }
TEST_P(LoadLanguage, amh) { LangLoader("amh", GetParam()); }
TEST_P(LoadLanguage, ara) { LangLoader("ara", GetParam()); }
TEST_P(LoadLanguage, asm) { LangLoader("asm", GetParam()); }
TEST_P(LoadLanguage, aze) { LangLoader("aze", GetParam()); }
TEST_P(LoadLanguage, aze_cyrl) { LangLoader("aze_cyrl", GetParam()); }
TEST_P(LoadLanguage, bel) { LangLoader("bel", GetParam()); }
TEST_P(LoadLanguage, ben) { LangLoader("ben", GetParam()); }
TEST_P(LoadLanguage, bod) { LangLoader("bod", GetParam()); }
TEST_P(LoadLanguage, bos) { LangLoader("bos", GetParam()); }
TEST_P(LoadLanguage, bre) { LangLoader("bre", GetParam()); }
TEST_P(LoadLanguage, bul) { LangLoader("bul", GetParam()); }
TEST_P(LoadLanguage, cat) { LangLoader("cat", GetParam()); }
TEST_P(LoadLanguage, ceb) { LangLoader("ceb", GetParam()); }
TEST_P(LoadLanguage, ces) { LangLoader("ces", GetParam()); }
TEST_P(LoadLanguage, chi_sim) { LangLoader("chi_sim", GetParam()); }
TEST_P(LoadLanguage, chi_sim_vert) { LangLoader("chi_sim_vert", GetParam()); }
TEST_P(LoadLanguage, chi_tra) { LangLoader("chi_tra", GetParam()); }
TEST_P(LoadLanguage, chi_tra_vert) { LangLoader("chi_tra_vert", GetParam()); }
TEST_P(LoadLanguage, chr) { LangLoader("chr", GetParam()); }
TEST_P(LoadLanguage, cos) { LangLoader("cos", GetParam()); }
TEST_P(LoadLanguage, cym) { LangLoader("cym", GetParam()); }
TEST_P(LoadLanguage, dan) { LangLoader("dan", GetParam()); }
TEST_P(LoadLanguage, deu) { LangLoader("deu", GetParam()); }
TEST_P(LoadLanguage, div) { LangLoader("div", GetParam()); }
TEST_P(LoadLanguage, dzo) { LangLoader("dzo", GetParam()); }
TEST_P(LoadLanguage, ell) { LangLoader("ell", GetParam()); }
TEST_P(LoadLanguage, eng) { LangLoader("eng", GetParam()); }
TEST_P(LoadLanguage, enm) { LangLoader("enm", GetParam()); }
TEST_P(LoadLanguage, epo) { LangLoader("epo", GetParam()); }
TEST_P(LoadLanguage, est) { LangLoader("est", GetParam()); }
TEST_P(LoadLanguage, eus) { LangLoader("eus", GetParam()); }
TEST_P(LoadLanguage, fao) { LangLoader("fao", GetParam()); }
TEST_P(LoadLanguage, fas) { LangLoader("fas", GetParam()); }
TEST_P(LoadLanguage, fil) { LangLoader("fil", GetParam()); }
TEST_P(LoadLanguage, fin) { LangLoader("fin", GetParam()); }
TEST_P(LoadLanguage, fra) { LangLoader("fra", GetParam()); }
TEST_P(LoadLanguage, frk) { LangLoader("frk", GetParam()); }
TEST_P(LoadLanguage, frm) { LangLoader("frm", GetParam()); }
TEST_P(LoadLanguage, fry) { LangLoader("fry", GetParam()); }
TEST_P(LoadLanguage, gla) { LangLoader("gla", GetParam()); }
TEST_P(LoadLanguage, gle) { LangLoader("gle", GetParam()); }
TEST_P(LoadLanguage, glg) { LangLoader("glg", GetParam()); }
TEST_P(LoadLanguage, grc) { LangLoader("grc", GetParam()); }
TEST_P(LoadLanguage, guj) { LangLoader("guj", GetParam()); }
TEST_P(LoadLanguage, hat) { LangLoader("hat", GetParam()); }
TEST_P(LoadLanguage, heb) { LangLoader("heb", GetParam()); }
TEST_P(LoadLanguage, hin) { LangLoader("hin", GetParam()); }
TEST_P(LoadLanguage, hrv) { LangLoader("hrv", GetParam()); }
TEST_P(LoadLanguage, hun) { LangLoader("hun", GetParam()); }
TEST_P(LoadLanguage, hye) { LangLoader("hye", GetParam()); }
TEST_P(LoadLanguage, iku) { LangLoader("iku", GetParam()); }
TEST_P(LoadLanguage, ind) { LangLoader("ind", GetParam()); }
TEST_P(LoadLanguage, isl) { LangLoader("isl", GetParam()); }
TEST_P(LoadLanguage, ita) { LangLoader("ita", GetParam()); }
TEST_P(LoadLanguage, ita_old) { LangLoader("ita_old", GetParam()); }
TEST_P(LoadLanguage, jav) { LangLoader("jav", GetParam()); }
TEST_P(LoadLanguage, jpn) { LangLoader("jpn", GetParam()); }
TEST_P(LoadLanguage, jpn_vert) { LangLoader("jpn_vert", GetParam()); }
TEST_P(LoadLanguage, kan) { LangLoader("kan", GetParam()); }
TEST_P(LoadLanguage, kat) { LangLoader("kat", GetParam()); }
TEST_P(LoadLanguage, kat_old) { LangLoader("kat_old", GetParam()); }
TEST_P(LoadLanguage, kaz) { LangLoader("kaz", GetParam()); }
TEST_P(LoadLanguage, khm) { LangLoader("khm", GetParam()); }
TEST_P(LoadLanguage, kir) { LangLoader("kir", GetParam()); }
// TEST_P(LoadLanguage, kmr) {LangLoader("kmr" , GetParam());}
TEST_P(LoadLanguage, kor) {LangLoader("kor" , GetParam());}
TEST_P(LoadLanguage, kor_vert) {LangLoader("kor_vert" , GetParam());}
TEST_P(LoadLanguage, lao) {LangLoader("lao" , GetParam());}
TEST_P(LoadLanguage, lat) {LangLoader("lat" , GetParam());}
TEST_P(LoadLanguage, lav) {LangLoader("lav" , GetParam());}
TEST_P(LoadLanguage, lit) {LangLoader("lit" , GetParam());}
TEST_P(LoadLanguage, ltz) {LangLoader("ltz" , GetParam());}
TEST_P(LoadLanguage, mal) {LangLoader("mal" , GetParam());}
TEST_P(LoadLanguage, mar) {LangLoader("mar" , GetParam());}
TEST_P(LoadLanguage, mkd) {LangLoader("mkd" , GetParam());}
TEST_P(LoadLanguage, mlt) {LangLoader("mlt" , GetParam());}
TEST_P(LoadLanguage, mon) {LangLoader("mon" , GetParam());}
TEST_P(LoadLanguage, mri) {LangLoader("mri" , GetParam());}
TEST_P(LoadLanguage, msa) {LangLoader("msa" , GetParam());}
TEST_P(LoadLanguage, mya) {LangLoader("mya" , GetParam());}
TEST_P(LoadLanguage, nep) {LangLoader("nep" , GetParam());}
TEST_P(LoadLanguage, nld) {LangLoader("nld" , GetParam());}
TEST_P(LoadLanguage, nor) {LangLoader("nor" , GetParam());}
TEST_P(LoadLanguage, oci) {LangLoader("oci" , GetParam());}
TEST_P(LoadLanguage, ori) {LangLoader("ori" , GetParam());}
TEST_P(LoadLanguage, osd) {LangLoader("osd" , GetParam());}
TEST_P(LoadLanguage, pan) {LangLoader("pan" , GetParam());}
TEST_P(LoadLanguage, pol) {LangLoader("pol" , GetParam());}
TEST_P(LoadLanguage, por) {LangLoader("por" , GetParam());}
TEST_P(LoadLanguage, pus) {LangLoader("pus" , GetParam());}
TEST_P(LoadLanguage, que) {LangLoader("que" , GetParam());}
TEST_P(LoadLanguage, ron) {LangLoader("ron" , GetParam());}
TEST_P(LoadLanguage, rus) {LangLoader("rus" , GetParam());}
TEST_P(LoadLanguage, san) {LangLoader("san" , GetParam());}
TEST_P(LoadLanguage, sin) {LangLoader("sin" , GetParam());}
TEST_P(LoadLanguage, slk) {LangLoader("slk" , GetParam());}
TEST_P(LoadLanguage, slv) {LangLoader("slv" , GetParam());}
TEST_P(LoadLanguage, snd) {LangLoader("snd" , GetParam());}
TEST_P(LoadLanguage, spa) {LangLoader("spa" , GetParam());}
TEST_P(LoadLanguage, spa_old) {LangLoader("spa_old" , GetParam());}
TEST_P(LoadLanguage, sqi) {LangLoader("sqi" , GetParam());}
TEST_P(LoadLanguage, srp) {LangLoader("srp" , GetParam());}
TEST_P(LoadLanguage, srp_latn) {LangLoader("srp_latn" , GetParam());}
TEST_P(LoadLanguage, sun) {LangLoader("sun" , GetParam());}
TEST_P(LoadLanguage, swa) {LangLoader("swa" , GetParam());}
TEST_P(LoadLanguage, swe) {LangLoader("swe" , GetParam());}
TEST_P(LoadLanguage, syr) {LangLoader("syr" , GetParam());}
TEST_P(LoadLanguage, tam) {LangLoader("tam" , GetParam());}
TEST_P(LoadLanguage, tat) {LangLoader("tat" , GetParam());}
TEST_P(LoadLanguage, tel) {LangLoader("tel" , GetParam());}
TEST_P(LoadLanguage, tgk) {LangLoader("tgk" , GetParam());}
TEST_P(LoadLanguage, tha) {LangLoader("tha" , GetParam());}
TEST_P(LoadLanguage, tir) {LangLoader("tir" , GetParam());}
TEST_P(LoadLanguage, ton) {LangLoader("ton" , GetParam());}
TEST_P(LoadLanguage, tur) {LangLoader("tur" , GetParam());}
TEST_P(LoadLanguage, uig) {LangLoader("uig" , GetParam());}
TEST_P(LoadLanguage, ukr) {LangLoader("ukr" , GetParam());}
TEST_P(LoadLanguage, urd) {LangLoader("urd" , GetParam());}
TEST_P(LoadLanguage, uzb) {LangLoader("uzb" , GetParam());}
TEST_P(LoadLanguage, uzb_cyrl) {LangLoader("uzb_cyrl" , GetParam());}
TEST_P(LoadLanguage, vie) {LangLoader("vie" , GetParam());}
TEST_P(LoadLanguage, yid) {LangLoader("yid" , GetParam());}
TEST_P(LoadLanguage, yor) {LangLoader("yor" , GetParam());}
TEST_P(LoadLanguage, kor) { LangLoader("kor", GetParam()); }
TEST_P(LoadLanguage, kor_vert) { LangLoader("kor_vert", GetParam()); }
TEST_P(LoadLanguage, lao) { LangLoader("lao", GetParam()); }
TEST_P(LoadLanguage, lat) { LangLoader("lat", GetParam()); }
TEST_P(LoadLanguage, lav) { LangLoader("lav", GetParam()); }
TEST_P(LoadLanguage, lit) { LangLoader("lit", GetParam()); }
TEST_P(LoadLanguage, ltz) { LangLoader("ltz", GetParam()); }
TEST_P(LoadLanguage, mal) { LangLoader("mal", GetParam()); }
TEST_P(LoadLanguage, mar) { LangLoader("mar", GetParam()); }
TEST_P(LoadLanguage, mkd) { LangLoader("mkd", GetParam()); }
TEST_P(LoadLanguage, mlt) { LangLoader("mlt", GetParam()); }
TEST_P(LoadLanguage, mon) { LangLoader("mon", GetParam()); }
TEST_P(LoadLanguage, mri) { LangLoader("mri", GetParam()); }
TEST_P(LoadLanguage, msa) { LangLoader("msa", GetParam()); }
TEST_P(LoadLanguage, mya) { LangLoader("mya", GetParam()); }
TEST_P(LoadLanguage, nep) { LangLoader("nep", GetParam()); }
TEST_P(LoadLanguage, nld) { LangLoader("nld", GetParam()); }
TEST_P(LoadLanguage, nor) { LangLoader("nor", GetParam()); }
TEST_P(LoadLanguage, oci) { LangLoader("oci", GetParam()); }
TEST_P(LoadLanguage, ori) { LangLoader("ori", GetParam()); }
TEST_P(LoadLanguage, osd) { LangLoader("osd", GetParam()); }
TEST_P(LoadLanguage, pan) { LangLoader("pan", GetParam()); }
TEST_P(LoadLanguage, pol) { LangLoader("pol", GetParam()); }
TEST_P(LoadLanguage, por) { LangLoader("por", GetParam()); }
TEST_P(LoadLanguage, pus) { LangLoader("pus", GetParam()); }
TEST_P(LoadLanguage, que) { LangLoader("que", GetParam()); }
TEST_P(LoadLanguage, ron) { LangLoader("ron", GetParam()); }
TEST_P(LoadLanguage, rus) { LangLoader("rus", GetParam()); }
TEST_P(LoadLanguage, san) { LangLoader("san", GetParam()); }
TEST_P(LoadLanguage, sin) { LangLoader("sin", GetParam()); }
TEST_P(LoadLanguage, slk) { LangLoader("slk", GetParam()); }
TEST_P(LoadLanguage, slv) { LangLoader("slv", GetParam()); }
TEST_P(LoadLanguage, snd) { LangLoader("snd", GetParam()); }
TEST_P(LoadLanguage, spa) { LangLoader("spa", GetParam()); }
TEST_P(LoadLanguage, spa_old) { LangLoader("spa_old", GetParam()); }
TEST_P(LoadLanguage, sqi) { LangLoader("sqi", GetParam()); }
TEST_P(LoadLanguage, srp) { LangLoader("srp", GetParam()); }
TEST_P(LoadLanguage, srp_latn) { LangLoader("srp_latn", GetParam()); }
TEST_P(LoadLanguage, sun) { LangLoader("sun", GetParam()); }
TEST_P(LoadLanguage, swa) { LangLoader("swa", GetParam()); }
TEST_P(LoadLanguage, swe) { LangLoader("swe", GetParam()); }
TEST_P(LoadLanguage, syr) { LangLoader("syr", GetParam()); }
TEST_P(LoadLanguage, tam) { LangLoader("tam", GetParam()); }
TEST_P(LoadLanguage, tat) { LangLoader("tat", GetParam()); }
TEST_P(LoadLanguage, tel) { LangLoader("tel", GetParam()); }
TEST_P(LoadLanguage, tgk) { LangLoader("tgk", GetParam()); }
TEST_P(LoadLanguage, tha) { LangLoader("tha", GetParam()); }
TEST_P(LoadLanguage, tir) { LangLoader("tir", GetParam()); }
TEST_P(LoadLanguage, ton) { LangLoader("ton", GetParam()); }
TEST_P(LoadLanguage, tur) { LangLoader("tur", GetParam()); }
TEST_P(LoadLanguage, uig) { LangLoader("uig", GetParam()); }
TEST_P(LoadLanguage, ukr) { LangLoader("ukr", GetParam()); }
TEST_P(LoadLanguage, urd) { LangLoader("urd", GetParam()); }
TEST_P(LoadLanguage, uzb) { LangLoader("uzb", GetParam()); }
TEST_P(LoadLanguage, uzb_cyrl) { LangLoader("uzb_cyrl", GetParam()); }
TEST_P(LoadLanguage, vie) { LangLoader("vie", GetParam()); }
TEST_P(LoadLanguage, yid) { LangLoader("yid", GetParam()); }
TEST_P(LoadLanguage, yor) { LangLoader("yor", GetParam()); }
INSTANTIATE_TEST_CASE_P( DISABLED_Tessdata_fast, LoadLanguage,
::testing::Values(TESSDATA_DIR "_fast") );
INSTANTIATE_TEST_CASE_P( DISABLED_Tessdata_best, LoadLanguage,
::testing::Values(TESSDATA_DIR "_best") );
INSTANTIATE_TEST_CASE_P( DISABLED_Tessdata, LoadLanguage,
::testing::Values(TESSDATA_DIR) );
INSTANTIATE_TEST_CASE_P(DISABLED_Tessdata_fast, LoadLanguage,
::testing::Values(TESSDATA_DIR "_fast"));
INSTANTIATE_TEST_CASE_P(DISABLED_Tessdata_best, LoadLanguage,
::testing::Values(TESSDATA_DIR "_best"));
INSTANTIATE_TEST_CASE_P(DISABLED_Tessdata, LoadLanguage,
::testing::Values(TESSDATA_DIR));
// For all scripts
class LoadScript : public QuickTest ,
public ::testing::WithParamInterface<const char*> {
};
class LoadScript : public QuickTest,
public ::testing::WithParamInterface<const char*> {};
TEST_P(LoadScript, Arabic) {LangLoader("script/Arabic" , GetParam());}
TEST_P(LoadScript, Armenian) {LangLoader("script/Armenian" , GetParam());}
TEST_P(LoadScript, Bengali) {LangLoader("script/Bengali" , GetParam());}
TEST_P(LoadScript, Canadian_Aboriginal) {LangLoader("script/Canadian_Aboriginal" , GetParam());}
TEST_P(LoadScript, Cherokee) {LangLoader("script/Cherokee" , GetParam());}
TEST_P(LoadScript, Cyrillic) {LangLoader("script/Cyrillic" , GetParam());}
TEST_P(LoadScript, Devanagari) {LangLoader("script/Devanagari" , GetParam());}
TEST_P(LoadScript, Ethiopic) {LangLoader("script/Ethiopic" , GetParam());}
TEST_P(LoadScript, Fraktur) {LangLoader("script/Fraktur" , GetParam());}
TEST_P(LoadScript, Georgian) {LangLoader("script/Georgian" , GetParam());}
TEST_P(LoadScript, Greek) {LangLoader("script/Greek" , GetParam());}
TEST_P(LoadScript, Gujarati) {LangLoader("script/Gujarati" , GetParam());}
TEST_P(LoadScript, Gurmukhi) {LangLoader("script/Gurmukhi" , GetParam());}
TEST_P(LoadScript, HanS) {LangLoader("script/HanS" , GetParam());}
TEST_P(LoadScript, HanS_vert) {LangLoader("script/HanS_vert" , GetParam());}
TEST_P(LoadScript, HanT) {LangLoader("script/HanT" , GetParam());}
TEST_P(LoadScript, HanT_vert) {LangLoader("script/HanT_vert" , GetParam());}
TEST_P(LoadScript, Hangul) {LangLoader("script/Hangul" , GetParam());}
TEST_P(LoadScript, Hangul_vert) {LangLoader("script/Hangul_vert" , GetParam());}
TEST_P(LoadScript, Hebrew) {LangLoader("script/Hebrew" , GetParam());}
TEST_P(LoadScript, Japanese) {LangLoader("script/Japanese" , GetParam());}
TEST_P(LoadScript, Japanese_vert) {LangLoader("script/Japanese_vert" , GetParam());}
TEST_P(LoadScript, Kannada) {LangLoader("script/Kannada" , GetParam());}
TEST_P(LoadScript, Khmer) {LangLoader("script/Khmer" , GetParam());}
TEST_P(LoadScript, Lao) {LangLoader("script/Lao" , GetParam());}
TEST_P(LoadScript, Latin) {LangLoader("script/Latin" , GetParam());}
TEST_P(LoadScript, Malayalam) {LangLoader("script/Malayalam" , GetParam());}
TEST_P(LoadScript, Myanmar) {LangLoader("script/Myanmar" , GetParam());}
TEST_P(LoadScript, Oriya) {LangLoader("script/Oriya" , GetParam());}
TEST_P(LoadScript, Sinhala) {LangLoader("script/Sinhala" , GetParam());}
TEST_P(LoadScript, Syriac) {LangLoader("script/Syriac" , GetParam());}
TEST_P(LoadScript, Tamil) {LangLoader("script/Tamil" , GetParam());}
TEST_P(LoadScript, Telugu) {LangLoader("script/Telugu" , GetParam());}
TEST_P(LoadScript, Thaana) {LangLoader("script/Thaana" , GetParam());}
TEST_P(LoadScript, Thai) {LangLoader("script/Thai" , GetParam());}
TEST_P(LoadScript, Tibetan) {LangLoader("script/Tibetan" , GetParam());}
TEST_P(LoadScript, Vietnamese) {LangLoader("script/Vietnamese" , GetParam());}
TEST_P(LoadScript, Arabic) { LangLoader("script/Arabic", GetParam()); }
TEST_P(LoadScript, Armenian) { LangLoader("script/Armenian", GetParam()); }
TEST_P(LoadScript, Bengali) { LangLoader("script/Bengali", GetParam()); }
TEST_P(LoadScript, Canadian_Aboriginal) {
LangLoader("script/Canadian_Aboriginal", GetParam());
}
TEST_P(LoadScript, Cherokee) { LangLoader("script/Cherokee", GetParam()); }
TEST_P(LoadScript, Cyrillic) { LangLoader("script/Cyrillic", GetParam()); }
TEST_P(LoadScript, Devanagari) { LangLoader("script/Devanagari", GetParam()); }
TEST_P(LoadScript, Ethiopic) { LangLoader("script/Ethiopic", GetParam()); }
TEST_P(LoadScript, Fraktur) { LangLoader("script/Fraktur", GetParam()); }
TEST_P(LoadScript, Georgian) { LangLoader("script/Georgian", GetParam()); }
TEST_P(LoadScript, Greek) { LangLoader("script/Greek", GetParam()); }
TEST_P(LoadScript, Gujarati) { LangLoader("script/Gujarati", GetParam()); }
TEST_P(LoadScript, Gurmukhi) { LangLoader("script/Gurmukhi", GetParam()); }
TEST_P(LoadScript, HanS) { LangLoader("script/HanS", GetParam()); }
TEST_P(LoadScript, HanS_vert) { LangLoader("script/HanS_vert", GetParam()); }
TEST_P(LoadScript, HanT) { LangLoader("script/HanT", GetParam()); }
TEST_P(LoadScript, HanT_vert) { LangLoader("script/HanT_vert", GetParam()); }
TEST_P(LoadScript, Hangul) { LangLoader("script/Hangul", GetParam()); }
TEST_P(LoadScript, Hangul_vert) {
LangLoader("script/Hangul_vert", GetParam());
}
TEST_P(LoadScript, Hebrew) { LangLoader("script/Hebrew", GetParam()); }
TEST_P(LoadScript, Japanese) { LangLoader("script/Japanese", GetParam()); }
TEST_P(LoadScript, Japanese_vert) {
LangLoader("script/Japanese_vert", GetParam());
}
TEST_P(LoadScript, Kannada) { LangLoader("script/Kannada", GetParam()); }
TEST_P(LoadScript, Khmer) { LangLoader("script/Khmer", GetParam()); }
TEST_P(LoadScript, Lao) { LangLoader("script/Lao", GetParam()); }
TEST_P(LoadScript, Latin) { LangLoader("script/Latin", GetParam()); }
TEST_P(LoadScript, Malayalam) { LangLoader("script/Malayalam", GetParam()); }
TEST_P(LoadScript, Myanmar) { LangLoader("script/Myanmar", GetParam()); }
TEST_P(LoadScript, Oriya) { LangLoader("script/Oriya", GetParam()); }
TEST_P(LoadScript, Sinhala) { LangLoader("script/Sinhala", GetParam()); }
TEST_P(LoadScript, Syriac) { LangLoader("script/Syriac", GetParam()); }
TEST_P(LoadScript, Tamil) { LangLoader("script/Tamil", GetParam()); }
TEST_P(LoadScript, Telugu) { LangLoader("script/Telugu", GetParam()); }
TEST_P(LoadScript, Thaana) { LangLoader("script/Thaana", GetParam()); }
TEST_P(LoadScript, Thai) { LangLoader("script/Thai", GetParam()); }
TEST_P(LoadScript, Tibetan) { LangLoader("script/Tibetan", GetParam()); }
TEST_P(LoadScript, Vietnamese) { LangLoader("script/Vietnamese", GetParam()); }
INSTANTIATE_TEST_CASE_P( DISABLED_Tessdata_fast, LoadScript,
::testing::Values(TESSDATA_DIR "_fast") );
INSTANTIATE_TEST_CASE_P( DISABLED_Tessdata_best, LoadScript,
::testing::Values(TESSDATA_DIR "_best") );
INSTANTIATE_TEST_CASE_P( DISABLED_Tessdata, LoadScript,
::testing::Values(TESSDATA_DIR) );
INSTANTIATE_TEST_CASE_P(DISABLED_Tessdata_fast, LoadScript,
::testing::Values(TESSDATA_DIR "_fast"));
INSTANTIATE_TEST_CASE_P(DISABLED_Tessdata_best, LoadScript,
::testing::Values(TESSDATA_DIR "_best"));
INSTANTIATE_TEST_CASE_P(DISABLED_Tessdata, LoadScript,
::testing::Values(TESSDATA_DIR));
class LoadLang : public QuickTest {
};
class LoadLang : public QuickTest {};
// Test Load of English here, as the parameterized tests are disabled by
// default.
TEST_F(LoadLang, engFast) { LangLoader("eng", TESSDATA_DIR "_fast"); }
TEST_F(LoadLang, engBest) { LangLoader("eng", TESSDATA_DIR "_best"); }
TEST_F(LoadLang, engBestInt) { LangLoader("eng", TESSDATA_DIR); }
// Test Load of English here, as the parameterized tests are disabled by default.
TEST_F(LoadLang, engFast) {LangLoader("eng" , TESSDATA_DIR "_fast");}
TEST_F(LoadLang, engBest) {LangLoader("eng" , TESSDATA_DIR "_best");}
TEST_F(LoadLang, engBestInt) {LangLoader("eng" , TESSDATA_DIR);}
// Use class LoadLang for languages which are NOT there in all three repos
TEST_F(LoadLang, kmrFast) {LangLoader("kmr" , TESSDATA_DIR "_fast");}
TEST_F(LoadLang, kmrBest) {LangLoader("kmr" , TESSDATA_DIR "_best");}
TEST_F(LoadLang, kmrFast) { LangLoader("kmr", TESSDATA_DIR "_fast"); }
TEST_F(LoadLang, kmrBest) { LangLoader("kmr", TESSDATA_DIR "_best"); }
// TEST_F(LoadLang, kmrBestInt) {LangLoader("kmr" , TESSDATA_DIR);}
} // namespace
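The LangLoader helper above is the whole pattern: Init returns non-zero when the traineddata cannot be loaded, and End releases it. A standalone variant of the same check (tessdata directory and language are whatever the caller passes), written to also free the API object:

#include <cstdio>
#include "baseapi.h"  // tesseract::TessBaseAPI

// Returns 0 if the language data loads, non-zero otherwise.
int CheckLanguageLoads(const char* lang, const char* tessdatadir) {
  tesseract::TessBaseAPI* api = new tesseract::TessBaseAPI();
  const int failed = api->Init(tessdatadir, lang);
  if (failed) std::printf("Could not initialize tesseract for %s\n", lang);
  api->End();
  delete api;
  return failed;
}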


@ -1,7 +1,8 @@
///////////////////////////////////////////////////////////////////////
// File: log.h
// Description: Include for custom log message for unittest for tesseract.
// based on //https://stackoverflow.com/questions/16491675/how-to-send-custom-message-in-google-c-testing-framework
// based on
// //https://stackoverflow.com/questions/16491675/how-to-send-custom-message-in-google-c-testing-framework
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@ -18,11 +19,13 @@
#include <iostream>
static class LOG { public: LOG() {}
std::ostream& info() {
std::cout << "[ LOG MSG ] ";
return std::cout;
}
static class LOG {
public:
LOG() {}
std::ostream& info() {
std::cout << "[ LOG MSG ] ";
return std::cout;
}
} log;
#endif // TESSERACT_UNITTEST_LOG_H_
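Usage of this helper is a one-liner; info() prints the "[ LOG MSG ]" prefix to std::cout and hands back the stream for the rest of the message:

#include "log.h"  // the header above

void LogExample() {
  // Prints: [ LOG MSG ] loaded 42 pages
  log.info() << "loaded " << 42 << " pages\n";
}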


@ -61,9 +61,8 @@ TEST_F(LSTMTrainerTest, ConvertModel) {
deu_trainer.InitCharSet(TestDataNameToPath("deu.traineddata"));
// Load the fra traineddata, strip out the model, and save to a tmp file.
TessdataManager mgr;
string fra_data = file::JoinPath(
FLAGS_test_srcdir, "tessdata_best",
"fra.traineddata");
string fra_data =
file::JoinPath(FLAGS_test_srcdir, "tessdata_best", "fra.traineddata");
CHECK(mgr.Init(fra_data.c_str())) << "Failed to load " << fra_data;
string model_path = file::JoinPath(FLAGS_test_tmpdir, "fra.lstm");
CHECK(mgr.ExtractToFile(model_path.c_str()));
@ -76,7 +75,7 @@ TEST_F(LSTMTrainerTest, ConvertModel) {
// baseapi_test.cc).
TessBaseAPI api;
api.Init(FLAGS_test_tmpdir.c_str(), "deu", tesseract::OEM_LSTM_ONLY);
Pix *src_pix = pixRead(TestDataNameToPath("phototest.tif").c_str());
Pix* src_pix = pixRead(TestDataNameToPath("phototest.tif").c_str());
CHECK(src_pix);
api.SetImage(src_pix);
std::unique_ptr<char[]> result(api.GetUTF8Text());


@ -41,8 +41,9 @@ const int kNumNonReject = 1000;
const int kNumCorrect = kNumNonReject - kNumTop1Errs;
// The total number of answers is given by the number of non-rejects plus
// all the multiple answers.
const int kNumAnswers = kNumNonReject + 2*(kNumTop2Errs - kNumTopNErrs) +
(kNumTop1Errs - kNumTop2Errs) + (kNumTopTopErrs - kNumTop1Errs);
const int kNumAnswers = kNumNonReject + 2 * (kNumTop2Errs - kNumTopNErrs) +
(kNumTop1Errs - kNumTop2Errs) +
(kNumTopTopErrs - kNumTop1Errs);
namespace tesseract {
@ -51,7 +52,7 @@ namespace tesseract {
class MockClassifier : public ShapeClassifier {
public:
explicit MockClassifier(ShapeTable* shape_table)
: shape_table_(shape_table), num_done_(0), done_bad_font_(false) {
: shape_table_(shape_table), num_done_(0), done_bad_font_(false) {
// Add a false font answer to the shape table. We pick a random unichar_id,
// add a new shape for it with a false font. Font must actually exist in
// the font table, but not match anything in the first 1000 samples.
@ -108,9 +109,7 @@ class MockClassifier : public ShapeClassifier {
return results->size();
}
// Provides access to the ShapeTable that this classifier works with.
virtual const ShapeTable* GetShapeTable() const {
return shape_table_;
}
virtual const ShapeTable* GetShapeTable() const { return shape_table_; }
private:
// Borrowed pointer to the ShapeTable.
@ -140,20 +139,18 @@ const double kMin1lDistance = 0.25;
class MasterTrainerTest : public testing::Test {
protected:
string TestDataNameToPath(const string& name) {
return file::JoinPath(FLAGS_test_srcdir,
"testdata/" + name);
return file::JoinPath(FLAGS_test_srcdir, "testdata/" + name);
}
string TessdataPath() {
return file::JoinPath(FLAGS_test_srcdir,
"tessdata");
return file::JoinPath(FLAGS_test_srcdir, "tessdata");
}
string TmpNameToPath(const string& name) {
return file::JoinPath(FLAGS_test_tmpdir, name);
}
MasterTrainerTest() {
shape_table_ = NULL;
master_trainer_ = NULL;
shape_table_ = nullptr;
master_trainer_ = nullptr;
}
~MasterTrainerTest() {
delete master_trainer_;
@ -169,17 +166,17 @@ class MasterTrainerTest : public testing::Test {
FLAGS_X = TestDataNameToPath("eng.xheights");
FLAGS_U = TestDataNameToPath("eng.unicharset");
string tr_file_name(TestDataNameToPath("eng.Arial.exp0.tr"));
const char* argv[] = {tr_file_name.c_str() };
const char* argv[] = {tr_file_name.c_str()};
int argc = 1;
STRING file_prefix;
delete master_trainer_;
delete shape_table_;
shape_table_ = NULL;
shape_table_ = nullptr;
tessoptind = 0;
master_trainer_ = LoadTrainingData(argc, argv, false,
&shape_table_, &file_prefix);
EXPECT_TRUE(master_trainer_ != NULL);
EXPECT_TRUE(shape_table_ != NULL);
master_trainer_ =
LoadTrainingData(argc, argv, false, &shape_table_, &file_prefix);
EXPECT_TRUE(master_trainer_ != nullptr);
EXPECT_TRUE(shape_table_ != nullptr);
}
// EXPECTs that the distance between I and l in Arial is 0 and that the
@ -203,29 +200,29 @@ class MasterTrainerTest : public testing::Test {
int shape_1 = shape_table_->FindShape(unichar_1, font_id);
EXPECT_GE(shape_1, 0);
float dist_I_l = master_trainer_->ShapeDistance(*shape_table_,
shape_I, shape_l);
float dist_I_l =
master_trainer_->ShapeDistance(*shape_table_, shape_I, shape_l);
// No tolerance here. We expect that I and l should match exactly.
EXPECT_EQ(0.0f, dist_I_l);
float dist_l_I = master_trainer_->ShapeDistance(*shape_table_,
shape_l, shape_I);
float dist_l_I =
master_trainer_->ShapeDistance(*shape_table_, shape_l, shape_I);
// BOTH ways.
EXPECT_EQ(0.0f, dist_l_I);
// l/1 on the other hand should be distinct.
float dist_l_1 = master_trainer_->ShapeDistance(*shape_table_,
shape_l, shape_1);
float dist_l_1 =
master_trainer_->ShapeDistance(*shape_table_, shape_l, shape_1);
EXPECT_GT(dist_l_1, kMin1lDistance);
float dist_1_l = master_trainer_->ShapeDistance(*shape_table_,
shape_1, shape_l);
float dist_1_l =
master_trainer_->ShapeDistance(*shape_table_, shape_1, shape_l);
EXPECT_GT(dist_1_l, kMin1lDistance);
// So should I/1.
float dist_I_1 = master_trainer_->ShapeDistance(*shape_table_,
shape_I, shape_1);
float dist_I_1 =
master_trainer_->ShapeDistance(*shape_table_, shape_I, shape_1);
EXPECT_GT(dist_I_1, kMin1lDistance);
float dist_1_I = master_trainer_->ShapeDistance(*shape_table_,
shape_1, shape_I);
float dist_1_I =
master_trainer_->ShapeDistance(*shape_table_, shape_1, shape_I);
EXPECT_GT(dist_1_I, kMin1lDistance);
}
@ -249,15 +246,14 @@ TEST_F(MasterTrainerTest, ErrorCounterTest) {
LoadMasterTrainer();
// Add the space character to the shape_table_ if not already present to
// count junk.
if (shape_table_->FindShape(0, -1) < 0)
shape_table_->AddShape(0, 0);
if (shape_table_->FindShape(0, -1) < 0) shape_table_->AddShape(0, 0);
// Make a mock classifier.
tesseract::ShapeClassifier* shape_classifier =
new tesseract::MockClassifier(shape_table_);
// Get the accuracy report.
STRING accuracy_report;
master_trainer_->TestClassifierOnSamples(tesseract::CT_UNICHAR_TOP1_ERR,
0, false, shape_classifier,
master_trainer_->TestClassifierOnSamples(tesseract::CT_UNICHAR_TOP1_ERR, 0,
false, shape_classifier,
&accuracy_report);
LOG(INFO) << accuracy_report.string();
string result_string = accuracy_report.string();
@ -287,6 +283,3 @@ TEST_F(MasterTrainerTest, ErrorCounterTest) {
}
} // namespace.

View File

@ -15,8 +15,8 @@
///////////////////////////////////////////////////////////////////////
#include "matrix.h"
#include "include_gunit.h"
#include "genericvector.h"
#include "include_gunit.h"
#include "tprintf.h"
namespace {

View File

@ -1,10 +1,10 @@
#include "tesseract/lstm/networkio.h"
#include "tesseract/lstm/stridemap.h"
using tesseract::FlexDimensions;
using tesseract::FD_BATCH;
using tesseract::FD_HEIGHT;
using tesseract::FD_WIDTH;
using tesseract::FlexDimensions;
using tesseract::NetworkIO;
using tesseract::StrideMap;
@ -93,9 +93,9 @@ TEST_F(NetworkioTest, CopyWithYReversal) {
StrideMap::Index index(copy.stride_map());
int next_t = 0;
int pos = 0;
std::vector<int> expected_values = {8, 9, 10, 11, 4, 5, 6, 7, 0, 1, 2,
3, 27, 28, 29, 30, 31, 22, 23, 24, 25, 26,
17, 18, 19, 20, 21, 12, 13, 14, 15, 16};
std::vector<int> expected_values = {
8, 9, 10, 11, 4, 5, 6, 7, 0, 1, 2, 3, 27, 28, 29, 30,
31, 22, 23, 24, 25, 26, 17, 18, 19, 20, 21, 12, 13, 14, 15, 16};
do {
int t = index.t();
// The indexed values match the expected values.
@ -125,9 +125,9 @@ TEST_F(NetworkioTest, CopyWithXReversal) {
StrideMap::Index index(copy.stride_map());
int next_t = 0;
int pos = 0;
std::vector<int> expected_values = {3, 2, 1, 0, 7, 6, 5, 4, 11, 10, 9,
8, 16, 15, 14, 13, 12, 21, 20, 19, 18, 17,
26, 25, 24, 23, 22, 31, 30, 29, 28, 27};
std::vector<int> expected_values = {
3, 2, 1, 0, 7, 6, 5, 4, 11, 10, 9, 8, 16, 15, 14, 13,
12, 21, 20, 19, 18, 17, 26, 25, 24, 23, 22, 31, 30, 29, 28, 27};
do {
int t = index.t();
// The indexed values match the expected values.
@ -157,9 +157,9 @@ TEST_F(NetworkioTest, CopyWithXYTranspose) {
StrideMap::Index index(copy.stride_map());
int next_t = 0;
int pos = 0;
std::vector<int> expected_values = {0, 4, 8, 1, 5, 9, 2, 6, 10, 3, 7,
11, 12, 17, 22, 27, 13, 18, 23, 28, 14, 19,
24, 29, 15, 20, 25, 30, 16, 21, 26, 31};
std::vector<int> expected_values = {
0, 4, 8, 1, 5, 9, 2, 6, 10, 3, 7, 11, 12, 17, 22, 27,
13, 18, 23, 28, 14, 19, 24, 29, 15, 20, 25, 30, 16, 21, 26, 31};
do {
int t = index.t();
// The indexed values match the expected values.

View File

@ -23,7 +23,7 @@ TEST(NormstrngsTest, BasicText) {
}
TEST(NormstrngsTest, LigatureText) {
const char* kTwoByteLigText = "ĳ"; // U+0133 (ĳ) -> ij
const char* kTwoByteLigText = "ĳ";  // U+0133 (ĳ) -> ij
string result;
EXPECT_TRUE(NormalizeUTF8String(UnicodeNormMode::kNFKC, OCRNorm::kNormalize,
GraphemeNorm::kNormalize, kTwoByteLigText,
@ -51,7 +51,7 @@ TEST(NormstrngsTest, OcrSpecificNormalization) {
&result));
EXPECT_STREQ("\"Hi", result.c_str());
const char* kEmDash = "Hi—"; // U+2014 (—) -> U+02D (-)
const char* kEmDash = "Hi—"; // U+2014 (—) -> U+02D (-)
EXPECT_TRUE(NormalizeUTF8String(UnicodeNormMode::kNFKC, OCRNorm::kNormalize,
GraphemeNorm::kNormalize, kEmDash, &result));
EXPECT_STREQ("Hi-", result.c_str());

View File

@ -16,7 +16,7 @@
namespace tesseract {
int test_data[] = { 8, 1, 2, -4, 7, 9, 65536, 4, 9, 0, -32767, 6, 7};
int test_data[] = {8, 1, 2, -4, 7, 9, 65536, 4, 9, 0, -32767, 6, 7};
// The fixture for testing GenericHeap and DoublePtr.
class NthItemTest : public testing::Test {
@ -64,7 +64,7 @@ TEST_F(NthItemTest, GeneralTest) {
TEST_F(NthItemTest, BoringTest) {
KDVector v;
// Push the test data onto the KDVector.
int test_data[] = { 8, 8, 8, 8, 8, 7, 7, 7, 7};
int test_data[] = {8, 8, 8, 8, 8, 7, 7, 7, 7};
for (int i = 0; i < ARRAYSIZE(test_data); ++i) {
IntKDPair pair(test_data[i], i);
v.push_back(pair);

View File

@ -14,104 +14,112 @@
// limitations under the License.
///////////////////////////////////////////////////////////////////////
//based on https://gist.github.com/amitdo/7c7a522004dd79b398340c9595b377e1
// based on https://gist.github.com/amitdo/7c7a522004dd79b398340c9595b377e1
// expects clones of tessdata, tessdata_fast and tessdata_best repos
//#include "log.h"
#include "include_gunit.h"
#include "baseapi.h"
#include "leptonica/allheaders.h"
#include <iostream>
#include <string>
#include "baseapi.h"
#include "include_gunit.h"
#include "leptonica/allheaders.h"
namespace {
class TestClass : public testing::Test {
protected:
};
};
void OSDTester( int expected_deg, const char* imgname, const char* tessdatadir) {
//log.info() << tessdatadir << " for image: " << imgname << std::endl;
tesseract::TessBaseAPI *api = new tesseract::TessBaseAPI();
ASSERT_FALSE(api->Init(tessdatadir, "osd")) << "Could not initialize tesseract.";
Pix *image = pixRead(imgname);
ASSERT_TRUE(image != nullptr) << "Failed to read test image.";
api->SetImage(image);
int orient_deg;
float orient_conf;
const char* script_name;
float script_conf;
bool detected = api->DetectOrientationScript(&orient_deg, &orient_conf, &script_name, &script_conf);
ASSERT_FALSE(!detected) << "Failed to detect OSD.";
printf("************ Orientation in degrees: %d, Orientation confidence: %.2f\n"
" Script: %s, Script confidence: %.2f\n",
orient_deg, orient_conf,
script_name, script_conf);
EXPECT_EQ(expected_deg, orient_deg);
api->End();
pixDestroy(&image);
}
void OSDTester(int expected_deg, const char* imgname, const char* tessdatadir) {
// log.info() << tessdatadir << " for image: " << imgname << std::endl;
tesseract::TessBaseAPI* api = new tesseract::TessBaseAPI();
ASSERT_FALSE(api->Init(tessdatadir, "osd"))
<< "Could not initialize tesseract.";
Pix* image = pixRead(imgname);
ASSERT_TRUE(image != nullptr) << "Failed to read test image.";
api->SetImage(image);
int orient_deg;
float orient_conf;
const char* script_name;
float script_conf;
bool detected = api->DetectOrientationScript(&orient_deg, &orient_conf,
&script_name, &script_conf);
ASSERT_FALSE(!detected) << "Failed to detect OSD.";
printf(
"************ Orientation in degrees: %d, Orientation confidence: %.2f\n"
" Script: %s, Script confidence: %.2f\n",
orient_deg, orient_conf, script_name, script_conf);
EXPECT_EQ(expected_deg, orient_deg);
api->End();
pixDestroy(&image);
}
class OSDTest : public TestClass ,
public ::testing::WithParamInterface<std::tuple<int, const char*, const char*>> {};
class OSDTest : public TestClass,
public ::testing::WithParamInterface<
std::tuple<int, const char*, const char*>> {};
TEST_P(OSDTest, MatchOrientationDegrees) {
OSDTester(std::get<0>(GetParam()), std::get<1>(GetParam()), std::get<2>(GetParam()));
}
TEST_P(OSDTest, MatchOrientationDegrees) {
OSDTester(std::get<0>(GetParam()), std::get<1>(GetParam()),
std::get<2>(GetParam()));
}
INSTANTIATE_TEST_CASE_P( TessdataEngEuroHebrew, OSDTest,
::testing::Combine(
::testing::Values(0),
::testing::Values(TESTING_DIR "/phototest.tif",
TESTING_DIR "/eurotext.tif",
TESTING_DIR "/hebrew.png"),
::testing::Values(TESSDATA_DIR)));
INSTANTIATE_TEST_CASE_P(
TessdataEngEuroHebrew, OSDTest,
::testing::Combine(::testing::Values(0),
::testing::Values(TESTING_DIR "/phototest.tif",
TESTING_DIR "/eurotext.tif",
TESTING_DIR "/hebrew.png"),
::testing::Values(TESSDATA_DIR)));
INSTANTIATE_TEST_CASE_P( TessdataBestEngEuroHebrew, OSDTest,
::testing::Combine(
::testing::Values(0),
::testing::Values(TESTING_DIR "/phototest.tif",
TESTING_DIR "/eurotext.tif",
TESTING_DIR "/hebrew.png"),
::testing::Values(TESSDATA_DIR "_best")));
INSTANTIATE_TEST_CASE_P(
TessdataBestEngEuroHebrew, OSDTest,
::testing::Combine(::testing::Values(0),
::testing::Values(TESTING_DIR "/phototest.tif",
TESTING_DIR "/eurotext.tif",
TESTING_DIR "/hebrew.png"),
::testing::Values(TESSDATA_DIR "_best")));
INSTANTIATE_TEST_CASE_P( TessdataFastEngEuroHebrew, OSDTest,
::testing::Combine(
::testing::Values(0),
::testing::Values(TESTING_DIR "/phototest.tif",
TESTING_DIR "/eurotext.tif",
TESTING_DIR "/hebrew.png"),
::testing::Values(TESSDATA_DIR "_fast")));
INSTANTIATE_TEST_CASE_P(
TessdataFastEngEuroHebrew, OSDTest,
::testing::Combine(::testing::Values(0),
::testing::Values(TESTING_DIR "/phototest.tif",
TESTING_DIR "/eurotext.tif",
TESTING_DIR "/hebrew.png"),
::testing::Values(TESSDATA_DIR "_fast")));
INSTANTIATE_TEST_CASE_P( TessdataFastRotated90, OSDTest,
::testing::Combine(
::testing::Values(90),
::testing::Values(TESTING_DIR "/phototest-rotated-R.png"),
::testing::Values(TESSDATA_DIR "_fast")));
INSTANTIATE_TEST_CASE_P(
TessdataFastRotated90, OSDTest,
::testing::Combine(::testing::Values(90),
::testing::Values(TESTING_DIR
"/phototest-rotated-R.png"),
::testing::Values(TESSDATA_DIR "_fast")));
INSTANTIATE_TEST_CASE_P( TessdataFastRotated180, OSDTest,
::testing::Combine(
::testing::Values(180),
::testing::Values(TESTING_DIR "/phototest-rotated-180.png"),
::testing::Values(TESSDATA_DIR "_fast")));
INSTANTIATE_TEST_CASE_P(
TessdataFastRotated180, OSDTest,
::testing::Combine(::testing::Values(180),
::testing::Values(TESTING_DIR
"/phototest-rotated-180.png"),
::testing::Values(TESSDATA_DIR "_fast")));
INSTANTIATE_TEST_CASE_P( TessdataFastRotated270, OSDTest,
::testing::Combine(
::testing::Values(270),
::testing::Values(TESTING_DIR "/phototest-rotated-L.png"),
::testing::Values(TESSDATA_DIR "_fast")));
INSTANTIATE_TEST_CASE_P(
TessdataFastRotated270, OSDTest,
::testing::Combine(::testing::Values(270),
::testing::Values(TESTING_DIR
"/phototest-rotated-L.png"),
::testing::Values(TESSDATA_DIR "_fast")));
INSTANTIATE_TEST_CASE_P( TessdataFastDevaRotated270, OSDTest,
::testing::Combine(
::testing::Values(270),
::testing::Values(TESTING_DIR "/devatest-rotated-270.png"),
::testing::Values(TESSDATA_DIR "_fast")));
INSTANTIATE_TEST_CASE_P(
TessdataFastDevaRotated270, OSDTest,
::testing::Combine(::testing::Values(270),
::testing::Values(TESTING_DIR
"/devatest-rotated-270.png"),
::testing::Values(TESSDATA_DIR "_fast")));
INSTANTIATE_TEST_CASE_P( TessdataFastDeva, OSDTest,
::testing::Combine(
::testing::Values(0),
::testing::Values(TESTING_DIR "/devatest.png"),
::testing::Values(TESSDATA_DIR "_fast")));
INSTANTIATE_TEST_CASE_P(
TessdataFastDeva, OSDTest,
::testing::Combine(::testing::Values(0),
::testing::Values(TESTING_DIR "/devatest.png"),
::testing::Values(TESSDATA_DIR "_fast")));
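A single case can also be exercised without the Combine() grid; a minimal sketch that simply duplicates the TessdataFastRotated180 case through a plain TEST:

// Illustrative sketch only: drives OSDTester directly for one known case.
TEST(OSDSingleCase, UpsideDownPhototest) {
  OSDTester(180, TESTING_DIR "/phototest-rotated-180.png",
            TESSDATA_DIR "_fast");
}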
} // namespace

View File

@ -10,20 +10,14 @@ namespace {
class PageSegModeTest : public testing::Test {
protected:
string TestDataNameToPath(const string& name) {
return file::JoinPath(FLAGS_test_srcdir,
"testdata/" + name);
return file::JoinPath(FLAGS_test_srcdir, "testdata/" + name);
}
string TessdataPath() {
return file::JoinPath(FLAGS_test_srcdir,
"tessdata");
return file::JoinPath(FLAGS_test_srcdir, "tessdata");
}
PageSegModeTest() {
src_pix_ = NULL;
}
~PageSegModeTest() {
pixDestroy(&src_pix_);
}
PageSegModeTest() { src_pix_ = nullptr; }
~PageSegModeTest() { pixDestroy(&src_pix_); }
void SetImage(const char* filename) {
pixDestroy(&src_pix_);
@ -34,26 +28,26 @@ class PageSegModeTest : public testing::Test {
// Tests that the given rectangle produces exactly the given text in the
// given segmentation mode (after chopping off the last 2 newlines.)
void VerifyRectText(tesseract::PageSegMode mode, const char* str,
int left, int top, int width, int height) {
void VerifyRectText(tesseract::PageSegMode mode, const char* str, int left,
int top, int width, int height) {
api_.SetPageSegMode(mode);
api_.SetRectangle(left, top, width, height);
char* result = api_.GetUTF8Text();
chomp_string(result);
chomp_string(result);
EXPECT_STREQ(str, result);
delete [] result;
delete[] result;
}
// Tests that the given rectangle does NOT produce the given text in the
// given segmentation mode.
void NotRectText(tesseract::PageSegMode mode, const char* str,
int left, int top, int width, int height) {
void NotRectText(tesseract::PageSegMode mode, const char* str, int left,
int top, int width, int height) {
api_.SetPageSegMode(mode);
api_.SetRectangle(left, top, width, height);
char* result = api_.GetUTF8Text();
EXPECT_STRNE(str, result);
delete [] result;
delete[] result;
}
Pix* src_pix_;
@ -66,26 +60,21 @@ class PageSegModeTest : public testing::Test {
TEST_F(PageSegModeTest, WordTest) {
SetImage("segmodeimg.tif");
// Test various rectangles around the inverse page number.
VerifyRectText(tesseract::PSM_SINGLE_WORD, "183",
1482, 146, 72, 44);
VerifyRectText(tesseract::PSM_SINGLE_WORD, "183",
1474, 134, 82, 72);
VerifyRectText(tesseract::PSM_SINGLE_WORD, "183",
1459, 116, 118, 112);
VerifyRectText(tesseract::PSM_SINGLE_WORD, "183", 1482, 146, 72, 44);
VerifyRectText(tesseract::PSM_SINGLE_WORD, "183", 1474, 134, 82, 72);
VerifyRectText(tesseract::PSM_SINGLE_WORD, "183", 1459, 116, 118, 112);
// Test a random pair of words as a line
VerifyRectText(tesseract::PSM_SINGLE_LINE, "What should",
1119, 621, 245, 54);
VerifyRectText(tesseract::PSM_SINGLE_LINE, "What should", 1119, 621, 245, 54);
// Test a random pair of words as a word
VerifyRectText(tesseract::PSM_SINGLE_WORD, "Whatshould",
1119, 621, 245, 54);
VerifyRectText(tesseract::PSM_SINGLE_WORD, "Whatshould", 1119, 621, 245, 54);
// Test single block mode.
VerifyRectText(tesseract::PSM_SINGLE_BLOCK, "both the\nfrom the",
181, 676, 179, 104);
VerifyRectText(tesseract::PSM_SINGLE_BLOCK, "both the\nfrom the", 181, 676,
179, 104);
// But doesn't work in line or word mode.
NotRectText(tesseract::PSM_SINGLE_LINE, "both the\nfrom the",
181, 676, 179, 104);
NotRectText(tesseract::PSM_SINGLE_WORD, "both the\nfrom the",
181, 676, 179, 104);
NotRectText(tesseract::PSM_SINGLE_LINE, "both the\nfrom the", 181, 676, 179,
104);
NotRectText(tesseract::PSM_SINGLE_WORD, "both the\nfrom the", 181, 676, 179,
104);
}
} // namespace

View File

@ -14,19 +14,24 @@ DECLARE_BOOL_PARAM_FLAG(use_only_legacy_fonts);
namespace {
using tesseract::FontUtils;
using tesseract::File;
using tesseract::FontUtils;
using tesseract::PangoFontInfo;
// Fonts in testdata directory
const char* kExpectedFontNames[] = {
"Arab", "Arial Bold Italic", "DejaVu Sans Ultra-Light", "Lohit Hindi",
const char* kExpectedFontNames[] = {"Arab",
"Arial Bold Italic",
"DejaVu Sans Ultra-Light",
"Lohit Hindi",
#if PANGO_VERSION <= 12005
"Times New Roman",
"Times New Roman",
#else
"Times New Roman,", // Pango v1.36.2 requires a trailing ','
"Times New Roman,", // Pango v1.36.2
// requires a trailing
// ','
#endif
"UnBatang", "Verdana"};
"UnBatang",
"Verdana"};
// Sample text used in tests.
const char kArabicText[] = "والفكر والصراع 1234,\nوالفكر والصراع";
@ -36,18 +41,17 @@ const char kKorText[] = "이는 것으로";
// Hindi words containing illegal vowel sequences.
const char* kBadlyFormedHinWords[] = {
#if PANGO_VERSION <= 12005
"उपयोक्ताो", "नहीें", "कहीअे", "पत्रिाका", "छह्णाीस",
"उपयोक्ताो", "नहीें", "कहीअे", "पत्रिाका", "छह्णाीस",
#endif
// Pango v1.36.2 will render the above words even though they are invalid.
"प्रंात", NULL };
// Pango v1.36.2 will render the above words even though they are invalid.
"प्रंात", nullptr};
class PangoFontInfoTest : public ::testing::Test {
protected:
// Creates a fake fonts.conf file that points to the testdata fonts for
// fontconfig to initialize with.
static void SetUpTestCase() {
FLAGS_fonts_dir = File::JoinPath(
FLAGS_test_srcdir, "testdata");
FLAGS_fonts_dir = File::JoinPath(FLAGS_test_srcdir, "testdata");
FLAGS_fontconfig_tmpdir = FLAGS_test_tmpdir;
FLAGS_use_only_legacy_fonts = false;
}
@ -111,12 +115,11 @@ TEST_F(PangoFontInfoTest, CanRenderString) {
TEST_F(PangoFontInfoTest, CanRenderLigature) {
font_info_.ParseFontDescriptionName("Arab 12");
const char kArabicLigature[] = "لا";
EXPECT_TRUE(font_info_.CanRenderString(kArabicLigature,
strlen(kArabicLigature)));
EXPECT_TRUE(
font_info_.CanRenderString(kArabicLigature, strlen(kArabicLigature)));
printf("Next word\n");
EXPECT_TRUE(font_info_.CanRenderString(kArabicText,
strlen(kArabicText)));
EXPECT_TRUE(font_info_.CanRenderString(kArabicText, strlen(kArabicText)));
}
TEST_F(PangoFontInfoTest, CannotRenderUncoveredString) {
@ -126,7 +129,7 @@ TEST_F(PangoFontInfoTest, CannotRenderUncoveredString) {
TEST_F(PangoFontInfoTest, CannotRenderInvalidString) {
font_info_.ParseFontDescriptionName("Lohit Hindi 12");
for (int i = 0; kBadlyFormedHinWords[i] != NULL; ++i) {
for (int i = 0; kBadlyFormedHinWords[i] != nullptr; ++i) {
EXPECT_FALSE(font_info_.CanRenderString(kBadlyFormedHinWords[i],
strlen(kBadlyFormedHinWords[i])))
<< "Can render " << kBadlyFormedHinWords[i];
@ -142,9 +145,9 @@ TEST_F(PangoFontInfoTest, CanDropUncoveredChars) {
// Dont drop non-letter characters like word joiners.
const char* kJoiners[] = {
"\u2060", // U+2060 (WJ)
"\u200C", // U+200C (ZWJ)
"\u200D" // U+200D (ZWNJ)
"\u2060", // U+2060 (WJ)
"\u200C", // U+200C (ZWJ)
"\u200D" // U+200D (ZWNJ)
};
for (int i = 0; i < ARRAYSIZE(kJoiners); ++i) {
word = kJoiners[i];
@ -153,7 +156,6 @@ TEST_F(PangoFontInfoTest, CanDropUncoveredChars) {
}
}
// ------------------------ FontUtils ------------------------------------
class FontUtilsTest : public ::testing::Test {
@ -161,8 +163,7 @@ class FontUtilsTest : public ::testing::Test {
// Creates a fake fonts.conf file that points to the testdata fonts for
// fontconfig to initialize with.
static void SetUpTestCase() {
FLAGS_fonts_dir = File::JoinPath(
FLAGS_test_srcdir, "testdata");
FLAGS_fonts_dir = File::JoinPath(FLAGS_test_srcdir, "testdata");
FLAGS_fontconfig_tmpdir = FLAGS_test_tmpdir;
}
@ -229,9 +230,9 @@ TEST_F(FontUtilsTest, DoesFindBestFonts) {
}
TEST_F(FontUtilsTest, DoesSelectFont) {
const char* kLangText[] = { kArabicText, kEngText, kHinText, kKorText, NULL };
const char* kLangNames[] = { "Arabic", "English", "Hindi", "Korean", NULL };
for (int i = 0; kLangText[i] != NULL; ++i) {
const char* kLangText[] = {kArabicText, kEngText, kHinText, kKorText, nullptr};
const char* kLangNames[] = {"Arabic", "English", "Hindi", "Korean", nullptr};
for (int i = 0; kLangText[i] != nullptr; ++i) {
SCOPED_TRACE(kLangNames[i]);
std::vector<string> graphemes;
string selected_font;
@ -246,8 +247,7 @@ TEST_F(FontUtilsTest, DoesFailToSelectFont) {
const char kMixedScriptText[] = "पिताने विवाह की | والفكر والصراع";
std::vector<string> graphemes;
string selected_font;
EXPECT_FALSE(FontUtils::SelectFont(kMixedScriptText,
strlen(kMixedScriptText),
EXPECT_FALSE(FontUtils::SelectFont(kMixedScriptText, strlen(kMixedScriptText),
&selected_font, &graphemes));
}
@ -271,17 +271,16 @@ TEST_F(FontUtilsTest, GetAllRenderableCharacters) {
FontUtils::GetAllRenderableCharacters(selected_fonts, &unicode_mask);
EXPECT_TRUE(unicode_mask['1']);
EXPECT_TRUE(unicode_mask[kHindiChar]);
EXPECT_FALSE(unicode_mask['A']); // Lohit doesn't render English,
EXPECT_FALSE(unicode_mask[kArabicChar]); // or Arabic,
EXPECT_FALSE(unicode_mask[kMongolianChar]); // or Mongolian,
EXPECT_FALSE(unicode_mask[kOghamChar]); // or Ogham.
EXPECT_FALSE(unicode_mask['A']); // Lohit doesn't render English,
EXPECT_FALSE(unicode_mask[kArabicChar]); // or Arabic,
EXPECT_FALSE(unicode_mask[kMongolianChar]); // or Mongolian,
EXPECT_FALSE(unicode_mask[kOghamChar]); // or Ogham.
// Check that none of the included fonts cover the Mongolian or Ogham space
// characters.
for (int f = 0; f < ARRAYSIZE(kExpectedFontNames); ++f) {
SCOPED_TRACE(absl::StrCat("Testing ", kExpectedFontNames[f]));
FontUtils::GetAllRenderableCharacters(kExpectedFontNames[f],
&unicode_mask);
FontUtils::GetAllRenderableCharacters(kExpectedFontNames[f], &unicode_mask);
EXPECT_FALSE(unicode_mask[kOghamChar]);
EXPECT_FALSE(unicode_mask[kMongolianChar]);
}

View File

@ -9,12 +9,11 @@ namespace { // anonymous namespace
// Functions for making monospace ASCII trial text for the paragraph detector.
const tesseract::ParagraphJustification kLeft = tesseract::JUSTIFICATION_LEFT;
const tesseract::ParagraphJustification kCenter
= tesseract::JUSTIFICATION_CENTER;
const tesseract::ParagraphJustification kRight
= tesseract::JUSTIFICATION_RIGHT;
const tesseract::ParagraphJustification kUnknown
= tesseract::JUSTIFICATION_UNKNOWN;
const tesseract::ParagraphJustification kCenter =
tesseract::JUSTIFICATION_CENTER;
const tesseract::ParagraphJustification kRight = tesseract::JUSTIFICATION_RIGHT;
const tesseract::ParagraphJustification kUnknown =
tesseract::JUSTIFICATION_UNKNOWN;
enum TextModelInputType {
PCONT = 0, // Continuation line of a paragraph (default).
@ -23,7 +22,7 @@ enum TextModelInputType {
};
struct TextAndModel {
const char *ascii;
const char* ascii;
TextModelInputType model_type;
// fields corresponding to PARA (see ccstruct/ocrpara.h)
@ -34,13 +33,13 @@ struct TextAndModel {
// Imagine that the given text is typewriter ASCII with each character ten
// pixels wide and twenty pixels high and return an appropriate row_info.
void AsciiToRowInfo(const char *text, int row_number,
tesseract::RowInfo *info) {
void AsciiToRowInfo(const char* text, int row_number,
tesseract::RowInfo* info) {
const int kCharWidth = 10;
const int kLineSpace = 30;
info->text = text;
info->has_leaders = strstr(text, "...") != NULL ||
strstr(text, ". . .") != NULL;
info->has_leaders =
strstr(text, "...") != nullptr || strstr(text, ". . .") != nullptr;
info->has_drop_cap = false;
info->pix_ldistance = info->pix_rdistance = 0;
info->average_interword_space = kCharWidth;
@ -50,20 +49,21 @@ void AsciiToRowInfo(const char *text, int row_number,
std::vector<string> words = absl::StrSplit(text, ' ', absl::SkipEmpty());
info->num_words = words.size();
if (info->num_words < 1)
return;
if (info->num_words < 1) return;
info->lword_text = words[0].c_str();
info->rword_text = words[words.size() - 1].c_str();
int lspace = 0;
while (lspace < info->text.size() && text[lspace] == ' ') { lspace++; }
while (lspace < info->text.size() && text[lspace] == ' ') {
lspace++;
}
int rspace = 0;
while (rspace < info->text.size() &&
text[info->text.size() - rspace - 1] == ' ') {
rspace++;
}
int top = - kLineSpace * row_number;
int top = -kLineSpace * row_number;
int bottom = top - kLineSpace;
int row_right = kCharWidth * info->text.size();
int lword_width = kCharWidth * info->lword_text.size();
@ -71,25 +71,19 @@ void AsciiToRowInfo(const char *text, int row_number,
info->pix_ldistance = lspace * kCharWidth;
info->pix_rdistance = rspace * kCharWidth;
info->lword_box =
TBOX(info->pix_ldistance, bottom,
info->pix_ldistance + lword_width, top);
info->rword_box =
TBOX(row_right - info->pix_rdistance - rword_width, bottom,
row_right - info->pix_rdistance, top);
TBOX(info->pix_ldistance, bottom, info->pix_ldistance + lword_width, top);
info->rword_box = TBOX(row_right - info->pix_rdistance - rword_width, bottom,
row_right - info->pix_rdistance, top);
tesseract::LeftWordAttributes(
NULL, NULL, info->lword_text,
&info->lword_indicates_list_item,
&info->lword_likely_starts_idea,
&info->lword_likely_ends_idea);
nullptr, nullptr, info->lword_text, &info->lword_indicates_list_item,
&info->lword_likely_starts_idea, &info->lword_likely_ends_idea);
tesseract::RightWordAttributes(
NULL, NULL, info->rword_text,
&info->rword_indicates_list_item,
&info->rword_likely_starts_idea,
&info->rword_likely_ends_idea);
nullptr, nullptr, info->rword_text, &info->rword_indicates_list_item,
&info->rword_likely_starts_idea, &info->rword_likely_ends_idea);
}
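// Worked example of the geometry above (numbers follow from kCharWidth = 10
// and kLineSpace = 30; the sample text is hypothetical): for row_number = 2
// and text = "  Look here." (12 chars, 2 leading spaces, no trailing spaces),
//   top = -30 * 2 = -60, bottom = -60 - 30 = -90, row_right = 10 * 12 = 120,
//   pix_ldistance = 2 * 10 = 20, pix_rdistance = 0, and
//   lword_box = TBOX(20, -90, 20 + 4 * 10, -60) = TBOX(20, -90, 60, -60).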
void MakeAsciiRowInfos(const TextAndModel *row_infos, int n,
GenericVector<tesseract::RowInfo> *output) {
void MakeAsciiRowInfos(const TextAndModel* row_infos, int n,
GenericVector<tesseract::RowInfo>* output) {
output->clear();
tesseract::RowInfo info;
for (int i = 0; i < n; i++) {
@ -100,8 +94,8 @@ void MakeAsciiRowInfos(const TextAndModel *row_infos, int n,
// Given n rows of reference ground truth, evaluate whether the n rows
// of PARA * pointers yield the same paragraph breakpoints.
void EvaluateParagraphDetection(const TextAndModel *correct, int n,
const GenericVector<PARA *> &detector_output) {
void EvaluateParagraphDetection(const TextAndModel* correct, int n,
const GenericVector<PARA*>& detector_output) {
int incorrect_breaks = 0;
int missed_breaks = 0;
int poorly_matched_models = 0;
@ -111,18 +105,16 @@ void EvaluateParagraphDetection(const TextAndModel *correct, int n,
for (int i = 1; i < n; i++) {
bool has_break = correct[i].model_type != PCONT;
bool detected_break = (detector_output[i - 1] != detector_output[i]);
if (has_break && !detected_break)
missed_breaks++;
if (detected_break && !has_break)
incorrect_breaks++;
if (has_break && !detected_break) missed_breaks++;
if (detected_break && !has_break) incorrect_breaks++;
if (has_break) {
if (correct[i].model_type == PNONE) {
if (detector_output[i]->model != NULL) {
if (detector_output[i]->model != nullptr) {
poorly_matched_models++;
}
} else {
if (correct[i].model.justification() != kUnknown &&
(detector_output[i]->model == NULL ||
(detector_output[i]->model == nullptr ||
!correct[i].model.Comparable(*detector_output[i]->model))) {
poorly_matched_models++;
}
@ -131,8 +123,7 @@ void EvaluateParagraphDetection(const TextAndModel *correct, int n,
detector_output[i]->is_very_first_or_continuation) {
bad_crowns++;
}
if (correct[i].is_list_item ^
detector_output[i]->is_list_item) {
if (correct[i].is_list_item ^ detector_output[i]->is_list_item) {
bad_list_items++;
}
}
@ -180,16 +171,16 @@ void EvaluateParagraphDetection(const TextAndModel *correct, int n,
}
}
void TestParagraphDetection(const TextAndModel *correct, int num_rows) {
void TestParagraphDetection(const TextAndModel* correct, int num_rows) {
GenericVector<tesseract::RowInfo> row_infos;
GenericVector<PARA *> row_owners;
GenericVector<PARA*> row_owners;
PARA_LIST paragraphs;
GenericVector<ParagraphModel *> models;
GenericVector<ParagraphModel*> models;
MakeAsciiRowInfos(correct, num_rows, &row_infos);
int debug_level(3);
tesseract::DetectParagraphs(debug_level, &row_infos, &row_owners,
&paragraphs, &models);
tesseract::DetectParagraphs(debug_level, &row_infos, &row_owners, &paragraphs,
&models);
EvaluateParagraphDetection(correct, num_rows, row_owners);
models.delete_data_pointers();
}
@ -220,15 +211,15 @@ TEST(ParagraphsTest, ListItemsIdentified) {
typedef ParagraphModel PModel;
const TextAndModel kTwoSimpleParagraphs[] = {
{" Look here, I have a paragraph.", PSTART, PModel(kLeft, 0, 20, 0, 0)},
{"This paragraph starts at the top"},
{"of the page and takes 3 lines. "},
{" Here I have a second paragraph", PSTART, PModel(kLeft, 0, 20, 0, 0)},
{"which indicates that the first "},
{"paragraph is not a continuation "},
{"from a previous page, as it is "},
{"indented just like this second "},
{"paragraph. "},
{" Look here, I have a paragraph.", PSTART, PModel(kLeft, 0, 20, 0, 0)},
{"This paragraph starts at the top"},
{"of the page and takes 3 lines. "},
{" Here I have a second paragraph", PSTART, PModel(kLeft, 0, 20, 0, 0)},
{"which indicates that the first "},
{"paragraph is not a continuation "},
{"from a previous page, as it is "},
{"indented just like this second "},
{"paragraph. "},
};
TEST(ParagraphsTest, TestSimpleParagraphDetection) {
@ -237,33 +228,34 @@ TEST(ParagraphsTest, TestSimpleParagraphDetection) {
}
const TextAndModel kFewCluesWithCrown[] = {
{"This paragraph starts at the top", PSTART, PModel(kLeft, 0, 20, 0, 0), true},
{"of the page and takes two lines."},
{" Here I have a second paragraph", PSTART, PModel(kLeft, 0, 20, 0, 0)},
{"which indicates that the first "},
{"paragraph is a continuation from"},
{"a previous page, as it is "},
{"indented just like this second "},
{"paragraph. "},
{"This paragraph starts at the top", PSTART, PModel(kLeft, 0, 20, 0, 0),
true},
{"of the page and takes two lines."},
{" Here I have a second paragraph", PSTART, PModel(kLeft, 0, 20, 0, 0)},
{"which indicates that the first "},
{"paragraph is a continuation from"},
{"a previous page, as it is "},
{"indented just like this second "},
{"paragraph. "},
};
TEST(ParagraphsTest, TestFewCluesWithCrown) {
TestParagraphDetection(kFewCluesWithCrown,
ABSL_ARRAYSIZE(kFewCluesWithCrown));
}
const TextAndModel kCrownedParagraph[] = {
{"The first paragraph on a page is", PSTART, PModel(kLeft, 0, 20, 0, 0), true},
{"often not indented as the rest "},
{"of the paragraphs are. Nonethe-"},
{"less it should be counted as the"},
{"same type of paragraph. "},
{" The second and third para- ", PSTART, PModel(kLeft, 0, 20, 0, 0)},
{"graphs are both indented two "},
{"spaces. "},
{" The first paragraph has what ", PSTART, PModel(kLeft, 0, 20, 0, 0)},
{"fmt refers to as a 'crown.' "},
{"The first paragraph on a page is", PSTART, PModel(kLeft, 0, 20, 0, 0),
true},
{"often not indented as the rest "},
{"of the paragraphs are. Nonethe-"},
{"less it should be counted as the"},
{"same type of paragraph. "},
{" The second and third para- ", PSTART, PModel(kLeft, 0, 20, 0, 0)},
{"graphs are both indented two "},
{"spaces. "},
{" The first paragraph has what ", PSTART, PModel(kLeft, 0, 20, 0, 0)},
{"fmt refers to as a 'crown.' "},
};
TEST(ParagraphsTest, TestCrownParagraphDetection) {
@ -271,18 +263,18 @@ TEST(ParagraphsTest, TestCrownParagraphDetection) {
}
const TextAndModel kFlushLeftParagraphs[] = {
{"It is sometimes the case that", PSTART, PModel(kLeft, 0, 0, 0, 0)},
{"flush left paragraphs (those"},
{"with no body indent) are not"},
{"actually crowns. "},
{"Instead, further paragraphs are", PSTART, PModel(kLeft, 0, 0, 0, 0)},
{"also flush left aligned. Usual-"},
{"ly, these paragraphs are set"},
{"apart vertically by some white-"},
{"space, but you can also detect"},
{"them by observing the big empty"},
{"space at the ends of the para-"},
{"graphs. "},
{"It is sometimes the case that", PSTART, PModel(kLeft, 0, 0, 0, 0)},
{"flush left paragraphs (those"},
{"with no body indent) are not"},
{"actually crowns. "},
{"Instead, further paragraphs are", PSTART, PModel(kLeft, 0, 0, 0, 0)},
{"also flush left aligned. Usual-"},
{"ly, these paragraphs are set"},
{"apart vertically by some white-"},
{"space, but you can also detect"},
{"them by observing the big empty"},
{"space at the ends of the para-"},
{"graphs. "},
};
TEST(ParagraphsText, TestRealFlushLeftParagraphs) {
@ -291,46 +283,45 @@ TEST(ParagraphsText, TestRealFlushLeftParagraphs) {
};
const TextAndModel kSingleFullPageContinuation[] = {
{"sometimes a page is one giant", PSTART, PModel(kLeft, 0, 20, 0, 0), true},
{"continuation. It flows from"},
{"line to line, using the full"},
{"column width with no clear"},
{"paragraph break, because it"},
{"actually doesn't have one. It"},
{"is the middle of one monster"},
{"paragraph continued from the"},
{"previous page and continuing"},
{"onto the next page. There-"},
{"fore, it ends up getting"},
{"marked as a crown and then"},
{"getting re-marked as any ex-"},
{"isting model. Not great, but"},
{"sometimes a page is one giant", PSTART, PModel(kLeft, 0, 20, 0, 0), true},
{"continuation. It flows from"},
{"line to line, using the full"},
{"column width with no clear"},
{"paragraph break, because it"},
{"actually doesn't have one. It"},
{"is the middle of one monster"},
{"paragraph continued from the"},
{"previous page and continuing"},
{"onto the next page. There-"},
{"fore, it ends up getting"},
{"marked as a crown and then"},
{"getting re-marked as any ex-"},
{"isting model. Not great, but"},
};
TEST(ParagraphsTest, TestSingleFullPageContinuation) {
const TextAndModel *correct = kSingleFullPageContinuation;
const TextAndModel* correct = kSingleFullPageContinuation;
int num_rows = ABSL_ARRAYSIZE(kSingleFullPageContinuation);
GenericVector<tesseract::RowInfo> row_infos;
GenericVector<PARA *> row_owners;
GenericVector<PARA*> row_owners;
PARA_LIST paragraphs;
GenericVector<ParagraphModel *> models;
GenericVector<ParagraphModel*> models;
models.push_back(new ParagraphModel(kLeft, 0, 20, 0, 10));
MakeAsciiRowInfos(correct, num_rows, &row_infos);
tesseract::DetectParagraphs(3, &row_infos, &row_owners, &paragraphs,
&models);
tesseract::DetectParagraphs(3, &row_infos, &row_owners, &paragraphs, &models);
EvaluateParagraphDetection(correct, num_rows, row_owners);
models.delete_data_pointers();
}
const TextAndModel kRightAligned[] = {
{"Right-aligned paragraphs are", PSTART, PModel(kRight, 0, 0, 0, 0)},
{" uncommon in Left-to-Right"},
{" languages, but they do"},
{" exist."},
{" Mostly, however, they're", PSTART, PModel(kRight, 0, 0, 0, 0)},
{" horribly tiny paragraphs in"},
{" tables on which we have no"},
{" chance anyways."},
{"Right-aligned paragraphs are", PSTART, PModel(kRight, 0, 0, 0, 0)},
{" uncommon in Left-to-Right"},
{" languages, but they do"},
{" exist."},
{" Mostly, however, they're", PSTART, PModel(kRight, 0, 0, 0, 0)},
{" horribly tiny paragraphs in"},
{" tables on which we have no"},
{" chance anyways."},
};
TEST(ParagraphsTest, TestRightAlignedParagraph) {
@ -338,66 +329,71 @@ TEST(ParagraphsTest, TestRightAlignedParagraph) {
}
const TextAndModel kTinyParagraphs[] = {
{" Occasionally, interspersed with", PSTART, PModel(kLeft, 0, 20, 0, 0)},
{"obvious paragraph text, you might"},
{"find short exchanges of dialogue "},
{"between characters. "},
{" 'Oh?' ", PSTART, PModel(kLeft, 0, 20, 0, 0)},
{" 'Don't be confused!' ", PSTART, PModel(kLeft, 0, 20, 0, 0)},
{" 'Not me!' ", PSTART, PModel(kLeft, 0, 20, 0, 0)},
{" One naive approach would be to ", PSTART, PModel(kLeft, 0, 20, 0, 0)},
{"mark a new paragraph whenever one"},
{"of the statistics (left, right or"},
{"center) changes from one text-"},
{"line to the next. Such an"},
{"approach would misclassify the"},
{"tiny paragraphs above as a single"},
{"paragraph. "},
{" Occasionally, interspersed with", PSTART, PModel(kLeft, 0, 20, 0, 0)},
{"obvious paragraph text, you might"},
{"find short exchanges of dialogue "},
{"between characters. "},
{" 'Oh?' ", PSTART, PModel(kLeft, 0, 20, 0, 0)},
{" 'Don't be confused!' ", PSTART, PModel(kLeft, 0, 20, 0, 0)},
{" 'Not me!' ", PSTART, PModel(kLeft, 0, 20, 0, 0)},
{" One naive approach would be to ", PSTART, PModel(kLeft, 0, 20, 0, 0)},
{"mark a new paragraph whenever one"},
{"of the statistics (left, right or"},
{"center) changes from one text-"},
{"line to the next. Such an"},
{"approach would misclassify the"},
{"tiny paragraphs above as a single"},
{"paragraph. "},
};
TEST(ParagraphsTest, TestTinyParagraphs) {
TestParagraphDetection(kTinyParagraphs, ABSL_ARRAYSIZE(kTinyParagraphs));
}
const TextAndModel kComplexPage1[] = {
{" Awesome ", PSTART, PModel(kCenter, 0, 0, 0, 0)},
{" Centered Title "},
{" Paragraph Detection "},
{" OCR TEAM "},
{" 10 November 2010 "},
{" ", PNONE},
{" Look here, I have a paragraph.", PSTART, PModel(kLeft, 0, 20, 0, 0)},
{"This paragraph starts at the top"},
{"of the page and takes 3 lines. "},
{" Here I have a second paragraph", PSTART, PModel(kLeft, 0, 20, 0, 0)},
{"which indicates that the first "},
{"paragraph is not a continuation "},
{"from a previous page, as it is "},
{"indented just like this second "},
{"paragraph. "},
{" Here is a block quote. It ", PSTART, PModel(kLeft, 30, 0, 0, 0), true},
{" looks like the prior text "},
{" but it is indented more "},
{" and is fully justified. "},
{" So how does one deal with ", PSTART, PModel(kLeft, 0, 20, 0, 0)},
{"centered text, block quotes, "},
{"normal paragraphs, and lists "},
{"like what follows? "},
{"1. Make a plan. ", PSTART, PModel(kLeft, 0, 0, 30, 0), false, true},
{"2. Use a heuristic, for example,", PSTART, PModel(kLeft, 0, 0, 30, 0), false, true},
{" looking for lines where the "},
{" first word of the next line "},
{" would fit on the previous "},
{" line. "},
{"8. Try to implement the plan in ", PSTART, PModel(kLeft, 0, 0, 30, 0), false, true},
{" Python and try it out. "},
{"4. Determine how to fix the ", PSTART, PModel(kLeft, 0, 0, 30, 0), false, true},
{" mistakes. "},
{"5. Repeat. ", PSTART, PModel(kLeft, 0, 0, 30, 0), false, true},
{" For extra painful penalty work", PSTART, PModel(kLeft, 0, 20, 0, 0)},
{"you can try to identify source "},
{"code. Ouch! "},
{" Awesome ", PSTART, PModel(kCenter, 0, 0, 0, 0)},
{" Centered Title "},
{" Paragraph Detection "},
{" OCR TEAM "},
{" 10 November 2010 "},
{" ", PNONE},
{" Look here, I have a paragraph.", PSTART, PModel(kLeft, 0, 20, 0, 0)},
{"This paragraph starts at the top"},
{"of the page and takes 3 lines. "},
{" Here I have a second paragraph", PSTART, PModel(kLeft, 0, 20, 0, 0)},
{"which indicates that the first "},
{"paragraph is not a continuation "},
{"from a previous page, as it is "},
{"indented just like this second "},
{"paragraph. "},
{" Here is a block quote. It ", PSTART, PModel(kLeft, 30, 0, 0, 0),
true},
{" looks like the prior text "},
{" but it is indented more "},
{" and is fully justified. "},
{" So how does one deal with ", PSTART, PModel(kLeft, 0, 20, 0, 0)},
{"centered text, block quotes, "},
{"normal paragraphs, and lists "},
{"like what follows? "},
{"1. Make a plan. ", PSTART, PModel(kLeft, 0, 0, 30, 0),
false, true},
{"2. Use a heuristic, for example,", PSTART, PModel(kLeft, 0, 0, 30, 0),
false, true},
{" looking for lines where the "},
{" first word of the next line "},
{" would fit on the previous "},
{" line. "},
{"8. Try to implement the plan in ", PSTART, PModel(kLeft, 0, 0, 30, 0),
false, true},
{" Python and try it out. "},
{"4. Determine how to fix the ", PSTART, PModel(kLeft, 0, 0, 30, 0),
false, true},
{" mistakes. "},
{"5. Repeat. ", PSTART, PModel(kLeft, 0, 0, 30, 0),
false, true},
{" For extra painful penalty work", PSTART, PModel(kLeft, 0, 20, 0, 0)},
{"you can try to identify source "},
{"code. Ouch! "},
};
TEST(ParagraphsTest, TestComplexPage1) {
@ -406,41 +402,47 @@ TEST(ParagraphsTest, TestComplexPage1) {
// The same as above, but wider.
const TextAndModel kComplexPage2[] = {
{" Awesome ", PSTART, PModel(kCenter, 0, 0, 0, 0)},
{" Centered Title "},
{" Paragraph Detection "},
{" OCR TEAM "},
{" 10 November 2010 "},
{" ", PNONE},
{" Look here, I have a paragraph. ", PSTART, PModel(kLeft, 0, 20, 0, 0)},
{"This paragraph starts at the top of"},
{"the page and takes 3 lines. "},
{" Here I have a second paragraph ", PSTART, PModel(kLeft, 0, 20, 0, 0)},
{"which indicates that the first "},
{"paragraph is not a continuation "},
{"from a previous page, as it is in- "},
{"dented just like this second para- "},
{"graph. "},
{" Here is a block quote. It ", PSTART, PModel(kLeft, 30, 0, 0, 0), true},
{" looks like the prior text "},
{" but it is indented more "},
{" and is fully justified. "},
{" So how does one deal with center-", PSTART, PModel(kLeft, 0, 20, 0, 0)},
{"ed text, block quotes, normal para-"},
{"graphs, and lists like what follow?"},
{"1. Make a plan. "}, // BUG!!
{"2. Use a heuristic, for example, ", PSTART, PModel(kLeft, 0, 0, 30, 0), false, true},
{" looking for lines where the "},
{" first word of the next line "},
{" would fit on the previous line. "},
{"8. Try to implement the plan in ", PSTART, PModel(kLeft, 0, 0, 30, 0), false, true},
{" Python and try it out. "},
{"4. Determine how to fix the ", PSTART, PModel(kLeft, 0, 0, 30, 0), false, true},
{" mistakes. "},
{"5. Repeat. ", PSTART, PModel(kLeft, 0, 0, 30, 0), false, true},
{" For extra painful penalty work ", PSTART, PModel(kLeft, 0, 20, 0, 0)},
{"you can try to identify source "},
{"code. Ouch! "},
{" Awesome ", PSTART,
PModel(kCenter, 0, 0, 0, 0)},
{" Centered Title "},
{" Paragraph Detection "},
{" OCR TEAM "},
{" 10 November 2010 "},
{" ", PNONE},
{" Look here, I have a paragraph. ", PSTART, PModel(kLeft, 0, 20, 0, 0)},
{"This paragraph starts at the top of"},
{"the page and takes 3 lines. "},
{" Here I have a second paragraph ", PSTART, PModel(kLeft, 0, 20, 0, 0)},
{"which indicates that the first "},
{"paragraph is not a continuation "},
{"from a previous page, as it is in- "},
{"dented just like this second para- "},
{"graph. "},
{" Here is a block quote. It ", PSTART, PModel(kLeft, 30, 0, 0, 0),
true},
{" looks like the prior text "},
{" but it is indented more "},
{" and is fully justified. "},
{" So how does one deal with center-", PSTART, PModel(kLeft, 0, 20, 0, 0)},
{"ed text, block quotes, normal para-"},
{"graphs, and lists like what follow?"},
{"1. Make a plan. "}, // BUG!!
{"2. Use a heuristic, for example, ", PSTART, PModel(kLeft, 0, 0, 30, 0),
false, true},
{" looking for lines where the "},
{" first word of the next line "},
{" would fit on the previous line. "},
{"8. Try to implement the plan in ", PSTART, PModel(kLeft, 0, 0, 30, 0),
false, true},
{" Python and try it out. "},
{"4. Determine how to fix the ", PSTART, PModel(kLeft, 0, 0, 30, 0),
false, true},
{" mistakes. "},
{"5. Repeat. ", PSTART, PModel(kLeft, 0, 0, 30, 0),
false, true},
{" For extra painful penalty work ", PSTART, PModel(kLeft, 0, 20, 0, 0)},
{"you can try to identify source "},
{"code. Ouch! "},
};
TEST(ParagraphsTest, TestComplexPage2) {
@ -448,14 +450,15 @@ TEST(ParagraphsTest, TestComplexPage2) {
}
const TextAndModel kSubtleCrown[] = {
{"The first paragraph on a page is", PSTART, PModel(kLeft, 0, 20, 0, 0), true},
{"often not indented as the rest "},
{"of the paragraphs are. Nonethe-"},
{"less it should be counted as the"},
{"same type of paragraph. "},
{" Even a short second paragraph ", PSTART, PModel(kLeft, 0, 20, 0, 0)},
{"should suffice. "},
{" 1235 ", PNONE},
{"The first paragraph on a page is", PSTART, PModel(kLeft, 0, 20, 0, 0),
true},
{"often not indented as the rest "},
{"of the paragraphs are. Nonethe-"},
{"less it should be counted as the"},
{"same type of paragraph. "},
{" Even a short second paragraph ", PSTART, PModel(kLeft, 0, 20, 0, 0)},
{"should suffice. "},
{" 1235 ", PNONE},
};
TEST(ParagraphsTest, TestSubtleCrown) {
@ -467,39 +470,43 @@ TEST(ParagraphsTest, TestStrayLineInBlock) {
}
const TextAndModel kUnlvRep3AO[] = {
{" Defined contribution plans cover employees in Australia, New", PSTART, PModel(kLeft, 0, 50, 0, 0)},
{"Zealand, Spain, the United Kingdom and some U.S. subsidiaries. "},
{"In addition, employees in the U.S. are eligible to participate in "},
{"defined contribution plans (Employee Savings Plans) by contribut-"},
{"ing a portion of their compensation. The Company matches com- "},
{"pensation, depending on Company profit levels. Contributions "},
{"charged to income for defined contribution plans were $92 in "},
{"1993, $98 in 1992 and $89 in 1991. "},
{" In addition to providing pension benefits, the Company pro- ", PSTART, PModel(kLeft, 0, 50, 0, 0)},
{"vides certain health care and life insurance benefits to retired "},
{"employees. As discussed in Note A, the Company adopted FASB "},
{"Statement No. 106 effective January 1, 1992. Previously, the "},
{"Company recognized the cost of providing these benefits as the "},
{"benefits were paid. These pretax costs amounted to $53 in 1991. "},
{"The Company continues to fund most of the cost of these medical "},
{"and life insurance benefits in the year incurred. "},
{" The U.S. plan covering the parent company is the largest plan.", PSTART, PModel(kLeft, 0, 50, 0, 0)},
{"It provides medical and life insurance benefits including hospital, "},
{"physicians services and major medical expense benefits and life "},
{"insurance benefits. The plan provides benefits supplemental to "},
{"Medicare after retirees are eligible for these benefits. The cost of "},
{"these benefits are shared by the Company and the retiree, with the "},
{"Company portion increasing as the retiree has increased years of "},
{"credited service. The Company has the ability to change these "},
{"benefits at any time. "},
{" Effective October 1993, the Company amended its health ", PSTART, PModel(kLeft, 0, 50, 0, 0)},
{"benefits plan in the U.S. to cap the cost absorbed by the Company "},
{"at approximately twice the 1993 cost per person for employees who"},
{"retire after December 31, 1993. The effect of this amendment was "},
{"to reduce the December 31, 1993 accumulated postretirement "},
{"benefit obligation by $327. It also reduced the net periodic postre- "},
{"tirement cost by $21 for 1993 and is estimated to reduce this cost "},
{"for 1994 by approximately $83. "},
{" Defined contribution plans cover employees in Australia, New", PSTART,
PModel(kLeft, 0, 50, 0, 0)},
{"Zealand, Spain, the United Kingdom and some U.S. subsidiaries. "},
{"In addition, employees in the U.S. are eligible to participate in "},
{"defined contribution plans (Employee Savings Plans) by contribut-"},
{"ing a portion of their compensation. The Company matches com- "},
{"pensation, depending on Company profit levels. Contributions "},
{"charged to income for defined contribution plans were $92 in "},
{"1993, $98 in 1992 and $89 in 1991. "},
{" In addition to providing pension benefits, the Company pro- ", PSTART,
PModel(kLeft, 0, 50, 0, 0)},
{"vides certain health care and life insurance benefits to retired "},
{"employees. As discussed in Note A, the Company adopted FASB "},
{"Statement No. 106 effective January 1, 1992. Previously, the "},
{"Company recognized the cost of providing these benefits as the "},
{"benefits were paid. These pretax costs amounted to $53 in 1991. "},
{"The Company continues to fund most of the cost of these medical "},
{"and life insurance benefits in the year incurred. "},
{" The U.S. plan covering the parent company is the largest plan.",
PSTART, PModel(kLeft, 0, 50, 0, 0)},
{"It provides medical and life insurance benefits including hospital, "},
{"physicians services and major medical expense benefits and life "},
{"insurance benefits. The plan provides benefits supplemental to "},
{"Medicare after retirees are eligible for these benefits. The cost of "},
{"these benefits are shared by the Company and the retiree, with the "},
{"Company portion increasing as the retiree has increased years of "},
{"credited service. The Company has the ability to change these "},
{"benefits at any time. "},
{" Effective October 1993, the Company amended its health ", PSTART,
PModel(kLeft, 0, 50, 0, 0)},
{"benefits plan in the U.S. to cap the cost absorbed by the Company "},
{"at approximately twice the 1993 cost per person for employees who"},
{"retire after December 31, 1993. The effect of this amendment was "},
{"to reduce the December 31, 1993 accumulated postretirement "},
{"benefit obligation by $327. It also reduced the net periodic postre- "},
{"tirement cost by $21 for 1993 and is estimated to reduce this cost "},
{"for 1994 by approximately $83. "},
};
TEST(ParagraphsTest, TestUnlvInsurance) {
@ -512,19 +519,19 @@ TEST(ParagraphsTest, TestUnlvInsurance) {
// paragraph or two.
// This example comes from Volume 9886293, Page 5
const TextAndModel kTableOfContents[] = {
{"1 Hmong People ........... 1", PSTART, PModel(kUnknown, 0, 0, 0, 0)},
{" Hmong Origins . . . . . 1", PSTART, PModel(kUnknown, 0, 0, 0, 0)},
{" Language . . . . . . . 1", PSTART, PModel(kUnknown, 0, 0, 0, 0)},
{" Proverbs . . . . . . 2", PSTART, PModel(kUnknown, 0, 0, 0, 0)},
{" Discussion . . . . 2", PSTART, PModel(kUnknown, 0, 0, 0, 0)},
{" Riddles . . . . . . . 2", PSTART, PModel(kUnknown, 0, 0, 0, 0)},
{" Discussion . . . . 3", PSTART, PModel(kUnknown, 0, 0, 0, 0)},
{" Appearance . . . . . 3", PSTART, PModel(kUnknown, 0, 0, 0, 0)},
{" Hmong History . . . . . 4", PSTART, PModel(kUnknown, 0, 0, 0, 0)},
{" Hmong in SE Asia . . . 4", PSTART, PModel(kUnknown, 0, 0, 0, 0)},
{" Hmong in the West . . .5", PSTART, PModel(kUnknown, 0, 0, 0, 0)},
{" Hmong in the USA . . . 5", PSTART, PModel(kUnknown, 0, 0, 0, 0)},
{" Discussion . . . . 6", PSTART, PModel(kUnknown, 0, 0, 0, 0)},
{"1 Hmong People ........... 1", PSTART, PModel(kUnknown, 0, 0, 0, 0)},
{" Hmong Origins . . . . . 1", PSTART, PModel(kUnknown, 0, 0, 0, 0)},
{" Language . . . . . . . 1", PSTART, PModel(kUnknown, 0, 0, 0, 0)},
{" Proverbs . . . . . . 2", PSTART, PModel(kUnknown, 0, 0, 0, 0)},
{" Discussion . . . . 2", PSTART, PModel(kUnknown, 0, 0, 0, 0)},
{" Riddles . . . . . . . 2", PSTART, PModel(kUnknown, 0, 0, 0, 0)},
{" Discussion . . . . 3", PSTART, PModel(kUnknown, 0, 0, 0, 0)},
{" Appearance . . . . . 3", PSTART, PModel(kUnknown, 0, 0, 0, 0)},
{" Hmong History . . . . . 4", PSTART, PModel(kUnknown, 0, 0, 0, 0)},
{" Hmong in SE Asia . . . 4", PSTART, PModel(kUnknown, 0, 0, 0, 0)},
{" Hmong in the West . . .5", PSTART, PModel(kUnknown, 0, 0, 0, 0)},
{" Hmong in the USA . . . 5", PSTART, PModel(kUnknown, 0, 0, 0, 0)},
{" Discussion . . . . 6", PSTART, PModel(kUnknown, 0, 0, 0, 0)},
};
TEST(ParagraphsTest, TestSplitsOutLeaderLines) {
@ -532,31 +539,34 @@ TEST(ParagraphsTest, TestSplitsOutLeaderLines) {
}
const TextAndModel kTextWithSourceCode[] = {
{" A typical page of a programming book may contain", PSTART, PModel(kLeft, 0, 20, 0, 0)},
{"examples of source code to exemplify an algorithm "},
{"being described in prose. Such examples should be"},
{"rendered as lineated text, meaning text with "},
{"explicit line breaks but without extra inter-line "},
{"spacing. Accidentally finding stray paragraphs in"},
{"source code would lead to a bad reading experience"},
{"when the text is re-flowed. "},
{" Let's show this by describing the function fact-", PSTART, PModel(kLeft, 0, 20, 0, 0)},
{"orial. Factorial is a simple recursive function "},
{"which grows very quickly. So quickly, in fact, "},
{"that the typical C implementation will only work "},
{"for values less than about 12: "},
{" ", PNONE},
{" # Naive implementation in C "},
{" int factorial(int n) { "},
{" if (n < 2) "},
{" return 1; "},
{" return n * factorial(n - 1); "},
{" } "},
{" "},
{" The C programming language does not have built- ", PSTART, PModel(kLeft, 0, 20, 0, 0)},
{"in support for detecting integer overflow, so this"},
{"naive implementation simply returns random values "},
{"if even a moderate sized n is provided. "},
{" A typical page of a programming book may contain", PSTART,
PModel(kLeft, 0, 20, 0, 0)},
{"examples of source code to exemplify an algorithm "},
{"being described in prose. Such examples should be"},
{"rendered as lineated text, meaning text with "},
{"explicit line breaks but without extra inter-line "},
{"spacing. Accidentally finding stray paragraphs in"},
{"source code would lead to a bad reading experience"},
{"when the text is re-flowed. "},
{" Let's show this by describing the function fact-", PSTART,
PModel(kLeft, 0, 20, 0, 0)},
{"orial. Factorial is a simple recursive function "},
{"which grows very quickly. So quickly, in fact, "},
{"that the typical C implementation will only work "},
{"for values less than about 12: "},
{" ", PNONE},
{" # Naive implementation in C "},
{" int factorial(int n) { "},
{" if (n < 2) "},
{" return 1; "},
{" return n * factorial(n - 1); "},
{" } "},
{" "},
{" The C programming language does not have built- ", PSTART,
PModel(kLeft, 0, 20, 0, 0)},
{"in support for detecting integer overflow, so this"},
{"naive implementation simply returns random values "},
{"if even a moderate sized n is provided. "},
};
TEST(ParagraphsTest, NotDistractedBySourceCode) {
@ -565,81 +575,103 @@ TEST(ParagraphsTest, NotDistractedBySourceCode) {
}
const TextAndModel kOldManAndSea[] = {
{"royal palm which are called guano and in it there was a bed, a", PSTART, PModel(kLeft, 0, 50, 0, 0)},
{"table, one chair, and a place on the dirt floor to cook with charcoal."},
{"On the brown walls of the flattened, overlapping leaves of the"},
{"sturdy fibered guano there was a picture in color of the Sacred"},
{"Heart of Jesus and another of the Virgin of Cobre. These were"},
{"relics of his wife. Once there had been a tinted photograph of his"},
{"wife on the wall but he had taken it down because it made him too"},
{"lonely to see it and it was on the shelf in the corner under his clean"},
{"shirt. "},
{" \"What do you have to eat?\" the boy asked. ", PSTART, PModel(kLeft, 0, 50, 0, 0)},
{" \"A pot of yellow rice with fish. Do you want some?\" ", PSTART, PModel(kLeft, 0, 50, 0, 0)},
{" \"No. I will eat at home. Do you want me to make the fire?\" ", PSTART, PModel(kLeft, 0, 50, 0, 0)},
{" \"No. I will make it later on. Or I may eat the rice cold.\" ", PSTART, PModel(kLeft, 0, 50, 0, 0)},
{" \"May I take the cast net?\" ", PSTART, PModel(kLeft, 0, 50, 0, 0)},
{" \"Of course.\" ", PSTART, PModel(kLeft, 0, 50, 0, 0)},
{" There was no cast net and the boy remembered when they had", PSTART, PModel(kLeft, 0, 50, 0, 0)},
{"sold it. But they went through this fiction every day. There was no"},
{"pot of yellow rice and fish and the boy knew this too. "},
{" \"Eighty-five is a lucky number,\" the old man said. \"How", PSTART, PModel(kLeft, 0, 50, 0, 0)},
{"would you like to see me bring one in that dressed out over a thou-"},
{"sand pounds? "},
{" \"I'll get the cast net and go for sardines. Will you sit in the sun", PSTART, PModel(kLeft, 0, 50, 0, 0)},
{"in the doorway?\" "},
{" \"Yes. I have yesterday's paper and I will read the baseball.\" ", PSTART, PModel(kLeft, 0, 50, 0, 0)},
{" The boy did not know whether yesterday's paper was a fiction", PSTART, PModel(kLeft, 0, 50, 0, 0)},
{"too. But the old man brought it out from under the bed. "},
{" \"Pedrico gave it to me at the bodega,\" he explained. ", PSTART, PModel(kLeft, 0, 50, 0, 0)},
{" \"I'll be back when I have the sardines. I'll keep yours and mine", PSTART, PModel(kLeft, 0, 50, 0, 0)},
{"together on ice and we can share them in the morning. When I"},
{"come back you can tell me about the baseball.\" "},
{" \"The Yankees cannot lose.\" ", PSTART, PModel(kLeft, 0, 50, 0, 0)},
{" \"But I fear the Indians of Cleveland.\" ", PSTART, PModel(kLeft, 0, 50, 0, 0)},
{" \"Have faith in the Yankees my son. Think of the great Di-", PSTART, PModel(kLeft, 0, 50, 0, 0)},
{"Maggio.\" "},
{" \"I fear both the Tigers of Detroit and the Indians of Cleve-", PSTART, PModel(kLeft, 0, 50, 0, 0)},
{"land.\" "}
};
{"royal palm which are called guano and in it there was a bed, a",
PSTART, PModel(kLeft, 0, 50, 0, 0)},
{"table, one chair, and a place on the dirt floor to cook with charcoal."},
{"On the brown walls of the flattened, overlapping leaves of the"},
{"sturdy fibered guano there was a picture in color of the Sacred"},
{"Heart of Jesus and another of the Virgin of Cobre. These were"},
{"relics of his wife. Once there had been a tinted photograph of his"},
{"wife on the wall but he had taken it down because it made him too"},
{"lonely to see it and it was on the shelf in the corner under his clean"},
{"shirt. "},
{" \"What do you have to eat?\" the boy asked. ",
PSTART, PModel(kLeft, 0, 50, 0, 0)},
{" \"A pot of yellow rice with fish. Do you want some?\" ",
PSTART, PModel(kLeft, 0, 50, 0, 0)},
{" \"No. I will eat at home. Do you want me to make the fire?\" ",
PSTART, PModel(kLeft, 0, 50, 0, 0)},
{" \"No. I will make it later on. Or I may eat the rice cold.\" ",
PSTART, PModel(kLeft, 0, 50, 0, 0)},
{" \"May I take the cast net?\" ",
PSTART, PModel(kLeft, 0, 50, 0, 0)},
{" \"Of course.\" ",
PSTART, PModel(kLeft, 0, 50, 0, 0)},
{" There was no cast net and the boy remembered when they had",
PSTART, PModel(kLeft, 0, 50, 0, 0)},
{"sold it. But they went through this fiction every day. There was no"},
{"pot of yellow rice and fish and the boy knew this too. "
" "},
{" \"Eighty-five is a lucky number,\" the old man said. \"How",
PSTART, PModel(kLeft, 0, 50, 0, 0)},
{"would you like to see me bring one in that dressed out over a "
"thou-"},
{"sand pounds? "
" "},
{" \"I'll get the cast net and go for sardines. Will you sit in the "
"sun",
PSTART, PModel(kLeft, 0, 50, 0, 0)},
{"in the doorway?\" "
" "},
{" \"Yes. I have yesterday's paper and I will read the baseball.\" ",
PSTART, PModel(kLeft, 0, 50, 0, 0)},
{" The boy did not know whether yesterday's paper was a fiction",
PSTART, PModel(kLeft, 0, 50, 0, 0)},
{"too. But the old man brought it out from under the bed. "},
{" \"Pedrico gave it to me at the bodega,\" he explained. "
" ",
PSTART, PModel(kLeft, 0, 50, 0, 0)},
{" \"I'll be back when I have the sardines. I'll keep yours and mine",
PSTART, PModel(kLeft, 0, 50, 0, 0)},
{"together on ice and we can share them in the morning. When I"},
{"come back you can tell me about the baseball.\" "},
{" \"The Yankees cannot lose.\" ",
PSTART, PModel(kLeft, 0, 50, 0, 0)},
{" \"But I fear the Indians of Cleveland.\" ",
PSTART, PModel(kLeft, 0, 50, 0, 0)},
{" \"Have faith in the Yankees my son. Think of the great Di-",
PSTART, PModel(kLeft, 0, 50, 0, 0)},
{"Maggio.\" "},
{" \"I fear both the Tigers of Detroit and the Indians of Cleve-",
PSTART, PModel(kLeft, 0, 50, 0, 0)},
{"land.\" "}};
TEST(ParagraphsTest, NotOverlyAggressiveWithBlockQuotes) {
TestParagraphDetection(kOldManAndSea, ABSL_ARRAYSIZE(kOldManAndSea));
}
const TextAndModel kNewZealandIndex[] = {
{"Oats, 51 ", PSTART, PModel(kLeft, 0, 0, 30, 0)},
{"O'Brien, Gregory, 175 ", PSTART, PModel(kLeft, 0, 0, 30, 0)},
{"Occupational composition, 110,", PSTART, PModel(kLeft, 0, 0, 30, 0)},
{" 138 "},
{"OECD rankings, 155, 172 ", PSTART, PModel(kLeft, 0, 0, 30, 0)},
{"Okiato (original capital), 47 ", PSTART, PModel(kLeft, 0, 0, 30, 0)},
{"Oil shock: 1974, xxx, 143; 1979,", PSTART, PModel(kLeft, 0, 0, 30, 0)},
{" 145 "},
{"Old Age Pensions, xxii, 89-90 ", PSTART, PModel(kLeft, 0, 0, 30, 0)},
{"Old World evils, 77 ", PSTART, PModel(kLeft, 0, 0, 30, 0)},
{"Oliver, W. H., 39, 77, 89 ", PSTART, PModel(kLeft, 0, 0, 30, 0)},
{"Olssen, Erik, 45, 64, 84 ", PSTART, PModel(kLeft, 0, 0, 30, 0)},
{"Olympic Games, 1924, 111, 144 ", PSTART, PModel(kLeft, 0, 0, 30, 0)},
{"Once on Chunuk Bair, 149 ", PSTART, PModel(kLeft, 0, 0, 30, 0)},
{"Once Were Warriors, xxxiii, 170", PSTART, PModel(kLeft, 0, 0, 30, 0)},
{"On—shore whaling, xvi ", PSTART, PModel(kLeft, 0, 0, 30, 0)},
{"Opotiki, xix ", PSTART, PModel(kLeft, 0, 0, 30, 0)},
{"Orakau battle of, xviii, 57 ", PSTART, PModel(kLeft, 0, 0, 30, 0)},
{"ORegan, Tipene, 170, 198-99 ", PSTART, PModel(kLeft, 0, 0, 30, 0)},
{"Organic agriculture, 177 ", PSTART, PModel(kLeft, 0, 0, 30, 0)},
{"Orwell, George, 151 ", PSTART, PModel(kLeft, 0, 0, 30, 0)},
{"Otago, xvii, 45, 49-50, 70 ", PSTART, PModel(kLeft, 0, 0, 30, 0)},
{"Otago block, xvii ", PSTART, PModel(kLeft, 0, 0, 30, 0)},
{"Otago Daily Times, 67 ", PSTART, PModel(kLeft, 0, 0, 30, 0)},
{"Otago Girls High School, xix, 61,", PSTART, PModel(kLeft, 0, 0, 30, 0)},
{" 85 "},
{"Otago gold rushes, 61-63 ", PSTART, PModel(kLeft, 0, 0, 30, 0)},
{"Otago Peninsula, xx ", PSTART, PModel(kLeft, 0, 0, 30, 0)},
{"Otago Provincial Council, 68 ", PSTART, PModel(kLeft, 0, 0, 30, 0)},
{"Otaki, 33 ", PSTART, PModel(kLeft, 0, 0, 30, 0)},
{"Owls Do Cry, 139 ", PSTART, PModel(kLeft, 0, 0, 30, 0)}
};
{"Oats, 51 ", PSTART, PModel(kLeft, 0, 0, 30, 0)},
{"O'Brien, Gregory, 175 ", PSTART, PModel(kLeft, 0, 0, 30, 0)},
{"Occupational composition, 110,", PSTART, PModel(kLeft, 0, 0, 30, 0)},
{" 138 "},
{"OECD rankings, 155, 172 ", PSTART, PModel(kLeft, 0, 0, 30, 0)},
{"Okiato (original capital), 47 ", PSTART, PModel(kLeft, 0, 0, 30, 0)},
{"Oil shock: 1974, xxx, 143; 1979,", PSTART, PModel(kLeft, 0, 0, 30, 0)},
{" 145 "},
{"Old Age Pensions, xxii, 89-90 ", PSTART, PModel(kLeft, 0, 0, 30, 0)},
{"Old World evils, 77 ", PSTART, PModel(kLeft, 0, 0, 30, 0)},
{"Oliver, W. H., 39, 77, 89 ", PSTART, PModel(kLeft, 0, 0, 30, 0)},
{"Olssen, Erik, 45, 64, 84 ", PSTART, PModel(kLeft, 0, 0, 30, 0)},
{"Olympic Games, 1924, 111, 144 ", PSTART, PModel(kLeft, 0, 0, 30, 0)},
{"Once on Chunuk Bair, 149 ", PSTART, PModel(kLeft, 0, 0, 30, 0)},
{"Once Were Warriors, xxxiii, 170", PSTART, PModel(kLeft, 0, 0, 30, 0)},
{"On—shore whaling, xvi ", PSTART, PModel(kLeft, 0, 0, 30, 0)},
{"Opotiki, xix ", PSTART, PModel(kLeft, 0, 0, 30, 0)},
{"Orakau battle of, xviii, 57 ", PSTART, PModel(kLeft, 0, 0, 30, 0)},
{"ORegan, Tipene, 170, 198-99 ", PSTART, PModel(kLeft, 0, 0, 30, 0)},
{"Organic agriculture, 177 ", PSTART, PModel(kLeft, 0, 0, 30, 0)},
{"Orwell, George, 151 ", PSTART, PModel(kLeft, 0, 0, 30, 0)},
{"Otago, xvii, 45, 49-50, 70 ", PSTART, PModel(kLeft, 0, 0, 30, 0)},
{"Otago block, xvii ", PSTART, PModel(kLeft, 0, 0, 30, 0)},
{"Otago Daily Times, 67 ", PSTART, PModel(kLeft, 0, 0, 30, 0)},
{"Otago Girls High School, xix, 61,", PSTART, PModel(kLeft, 0, 0, 30, 0)},
{" 85 "},
{"Otago gold rushes, 61-63 ", PSTART, PModel(kLeft, 0, 0, 30, 0)},
{"Otago Peninsula, xx ", PSTART, PModel(kLeft, 0, 0, 30, 0)},
{"Otago Provincial Council, 68 ", PSTART, PModel(kLeft, 0, 0, 30, 0)},
{"Otaki, 33 ", PSTART, PModel(kLeft, 0, 0, 30, 0)},
{"Owls Do Cry, 139 ", PSTART, PModel(kLeft, 0, 0, 30, 0)}};
TEST(ParagraphsTest, IndexPageTest) {
TestParagraphDetection(kNewZealandIndex, ABSL_ARRAYSIZE(kNewZealandIndex));
}
View File

@ -10,15 +10,14 @@ namespace {
class ParamsModelTest : public testing::Test {
protected:
string TestDataNameToPath(const string& name) const {
return file::JoinPath(FLAGS_test_srcdir,
"testdata/" + name);
return file::JoinPath(FLAGS_test_srcdir, "testdata/" + name);
}
string OutputNameToPath(const string& name) const {
return file::JoinPath(FLAGS_test_tmpdir, name);
}
// Test that we are able to load a params model, save it, reload it,
// and verify that the re-serialized version is the same as the original.
void TestParamsModelRoundTrip(const string &params_model_filename) const {
void TestParamsModelRoundTrip(const string& params_model_filename) const {
tesseract::ParamsModel orig_model;
tesseract::ParamsModel duplicate_model;
string orig_file = TestDataNameToPath(params_model_filename);
@ -26,7 +25,7 @@ class ParamsModelTest : public testing::Test {
EXPECT_TRUE(orig_model.LoadFromFile("eng", orig_file.c_str()));
EXPECT_TRUE(orig_model.SaveToFile(out_file.c_str()));
EXPECT_TRUE(duplicate_model.LoadFromFile("eng", out_file.c_str()));
EXPECT_TRUE(orig_model.Equivalent(duplicate_model));
}

View File

@ -16,146 +16,146 @@
// expects clone of tessdata_fast repo in ../../tessdata_fast
#include "include_gunit.h"
#include "gmock/gmock.h"
#include "baseapi.h"
#include "ocrclass.h"
#include "leptonica/allheaders.h"
#include <iostream>
#include <string>
#include <fstream>
#include <locale>
#include <limits.h>
#include <time.h>
#include <fstream>
#include <iostream>
#include <locale>
#include <string>
#include "baseapi.h"
#include "gmock/gmock.h"
#include "include_gunit.h"
#include "leptonica/allheaders.h"
#include "ocrclass.h"
namespace {
class QuickTest : public testing::Test {
protected:
virtual void SetUp() {
start_time_ = time(nullptr);
}
virtual void SetUp() { start_time_ = time(nullptr); }
virtual void TearDown() {
const time_t end_time = time(nullptr);
EXPECT_TRUE(end_time - start_time_ <=25) << "The test took too long - " << ::testing::PrintToString(end_time - start_time_);
EXPECT_TRUE(end_time - start_time_ <= 25)
<< "The test took too long - "
<< ::testing::PrintToString(end_time - start_time_);
}
time_t start_time_;
};
};
class ClassicMockProgressSink {
public:
MOCK_METHOD1(classicProgress, bool( int ) );
MOCK_METHOD1(cancel, bool( int ));
class ClassicMockProgressSink {
public:
MOCK_METHOD1(classicProgress, bool(int));
MOCK_METHOD1(cancel, bool(int));
ETEXT_DESC monitor;
ETEXT_DESC monitor;
ClassicMockProgressSink()
{
monitor.progress_callback = []( int progress, int, int, int, int ) ->bool {
return instance->classicProgress( progress );
};
monitor.cancel = []( void* ths, int words ) -> bool {
return ((ClassicMockProgressSink*)ths)->cancel(words);
};
monitor.cancel_this = this;
instance = this;
}
static ClassicMockProgressSink* instance;
};
ClassicMockProgressSink* ClassicMockProgressSink::instance = nullptr;
class NewMockProgressSink : public ClassicMockProgressSink {
public:
MOCK_METHOD1(progress, bool( int ) );
NewMockProgressSink()
{
monitor.progress_callback2 = [](ETEXT_DESC* ths, int, int, int, int ) -> bool {
return ((NewMockProgressSink*)ths->cancel_this)->progress( ths->progress );
};
}
};
void ClassicProgressTester(const char* imgname, const char* tessdatadir, const char* lang) {
using ::testing::_;
using ::testing::AllOf;
using ::testing::AtLeast;
using ::testing::DoAll;
using ::testing::Gt;
using ::testing::Le;
using ::testing::Return;
using ::testing::SaveArg;
tesseract::TessBaseAPI *api = new tesseract::TessBaseAPI();
ASSERT_FALSE(api->Init(tessdatadir, lang)) << "Could not initialize tesseract.";
Pix *image = pixRead(imgname);
ASSERT_TRUE(image != nullptr) << "Failed to read test image.";
api->SetImage(image);
ClassicMockProgressSink progressSink;
int currentProgress = -1;
EXPECT_CALL( progressSink, classicProgress(AllOf(Gt<int&>(currentProgress),Le(100))) )
.Times(AtLeast(5))
.WillRepeatedly( DoAll(SaveArg<0>(&currentProgress),
Return(false) ));
EXPECT_CALL( progressSink, cancel(_) )
.Times(AtLeast(5))
.WillRepeatedly(Return(false));
EXPECT_EQ( api->Recognize( &progressSink.monitor ), false );
EXPECT_GE( currentProgress, 50 ) << "The reported progress did not reach 50%";
api->End();
pixDestroy(&image);
ClassicMockProgressSink() {
monitor.progress_callback = [](int progress, int, int, int, int) -> bool {
return instance->classicProgress(progress);
};
monitor.cancel = [](void* ths, int words) -> bool {
return ((ClassicMockProgressSink*)ths)->cancel(words);
};
monitor.cancel_this = this;
instance = this;
}
void NewProgressTester(const char* imgname, const char* tessdatadir, const char* lang) {
using ::testing::_;
using ::testing::AllOf;
using ::testing::AtLeast;
using ::testing::DoAll;
using ::testing::Gt;
using ::testing::Le;
using ::testing::Return;
using ::testing::SaveArg;
static ClassicMockProgressSink* instance;
};
tesseract::TessBaseAPI *api = new tesseract::TessBaseAPI();
ASSERT_FALSE(api->Init(tessdatadir, lang)) << "Could not initialize tesseract.";
Pix *image = pixRead(imgname);
ASSERT_TRUE(image != nullptr) << "Failed to read test image.";
api->SetImage(image);
ClassicMockProgressSink* ClassicMockProgressSink::instance = nullptr;
NewMockProgressSink progressSink;
class NewMockProgressSink : public ClassicMockProgressSink {
public:
MOCK_METHOD1(progress, bool(int));
int currentProgress = -1;
EXPECT_CALL( progressSink, classicProgress(_) )
.Times(0);
EXPECT_CALL( progressSink, progress(AllOf(Gt<int&>(currentProgress),Le(100))) )
.Times(AtLeast(5))
.WillRepeatedly( DoAll(SaveArg<0>(&currentProgress),
Return(false) ));
EXPECT_CALL( progressSink, cancel(_) )
.Times(AtLeast(5))
.WillRepeatedly(Return(false));
EXPECT_EQ( api->Recognize( &progressSink.monitor ), false );
EXPECT_GE( currentProgress, 50 ) << "The reported progress did not reach 50%";
api->End();
pixDestroy(&image);
NewMockProgressSink() {
monitor.progress_callback2 = [](ETEXT_DESC* ths, int, int, int,
int) -> bool {
return ((NewMockProgressSink*)ths->cancel_this)->progress(ths->progress);
};
}
};
TEST(QuickTest, ClassicProgressReporitng) {
ClassicProgressTester(TESTING_DIR "/phototest.tif",
TESSDATA_DIR "_fast", "eng");
}
void ClassicProgressTester(const char* imgname, const char* tessdatadir,
const char* lang) {
using ::testing::_;
using ::testing::AllOf;
using ::testing::AtLeast;
using ::testing::DoAll;
using ::testing::Gt;
using ::testing::Le;
using ::testing::Return;
using ::testing::SaveArg;
TEST(QuickTest, NewProgressReporitng) {
NewProgressTester(TESTING_DIR "/phototest.tif",
TESSDATA_DIR "_fast", "eng");
}
tesseract::TessBaseAPI* api = new tesseract::TessBaseAPI();
ASSERT_FALSE(api->Init(tessdatadir, lang))
<< "Could not initialize tesseract.";
Pix* image = pixRead(imgname);
ASSERT_TRUE(image != nullptr) << "Failed to read test image.";
api->SetImage(image);
ClassicMockProgressSink progressSink;
int currentProgress = -1;
EXPECT_CALL(progressSink,
classicProgress(AllOf(Gt<int&>(currentProgress), Le(100))))
.Times(AtLeast(5))
.WillRepeatedly(DoAll(SaveArg<0>(&currentProgress), Return(false)));
EXPECT_CALL(progressSink, cancel(_))
.Times(AtLeast(5))
.WillRepeatedly(Return(false));
EXPECT_EQ(api->Recognize(&progressSink.monitor), false);
EXPECT_GE(currentProgress, 50) << "The reported progress did not reach 50%";
api->End();
pixDestroy(&image);
}
void NewProgressTester(const char* imgname, const char* tessdatadir,
const char* lang) {
using ::testing::_;
using ::testing::AllOf;
using ::testing::AtLeast;
using ::testing::DoAll;
using ::testing::Gt;
using ::testing::Le;
using ::testing::Return;
using ::testing::SaveArg;
tesseract::TessBaseAPI* api = new tesseract::TessBaseAPI();
ASSERT_FALSE(api->Init(tessdatadir, lang))
<< "Could not initialize tesseract.";
Pix* image = pixRead(imgname);
ASSERT_TRUE(image != nullptr) << "Failed to read test image.";
api->SetImage(image);
NewMockProgressSink progressSink;
int currentProgress = -1;
EXPECT_CALL(progressSink, classicProgress(_)).Times(0);
EXPECT_CALL(progressSink, progress(AllOf(Gt<int&>(currentProgress), Le(100))))
.Times(AtLeast(5))
.WillRepeatedly(DoAll(SaveArg<0>(&currentProgress), Return(false)));
EXPECT_CALL(progressSink, cancel(_))
.Times(AtLeast(5))
.WillRepeatedly(Return(false));
EXPECT_EQ(api->Recognize(&progressSink.monitor), false);
EXPECT_GE(currentProgress, 50) << "The reported progress did not reach 50%";
api->End();
pixDestroy(&image);
}
TEST(QuickTest, ClassicProgressReporitng) {
ClassicProgressTester(TESTING_DIR "/phototest.tif", TESSDATA_DIR "_fast",
"eng");
}
TEST(QuickTest, NewProgressReporitng) {
NewProgressTester(TESTING_DIR "/phototest.tif", TESSDATA_DIR "_fast", "eng");
}
} // namespace
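For orientation while reading this part of the diff: the mock sinks above exercise the monitor-based progress API of TessBaseAPI::Recognize. A minimal standalone sketch of that usage follows, assuming the same headers the test includes; it is illustrative only and not part of this commit (PrintProgress, the tessdata path, and the image path are placeholder names).
#include <cstdio>
#include "baseapi.h"
#include "ocrclass.h"
#include "leptonica/allheaders.h"
// New-style progress callback: the monitor itself is passed in, so the current
// percentage is read from monitor->progress (cf. progress_callback2 above).
static bool PrintProgress(ETEXT_DESC* monitor, int, int, int, int) {
  printf("recognition progress: %d%%\n", monitor->progress);
  return false;  // mirrors the mock callbacks above, which also return false
}
int main() {
  tesseract::TessBaseAPI api;
  if (api.Init("tessdata", "eng") != 0) return 1;  // placeholder tessdata path
  Pix* image = pixRead("phototest.tif");           // placeholder input image
  if (image == nullptr) return 1;
  api.SetImage(image);
  ETEXT_DESC monitor;                          // default-constructed monitor
  monitor.progress_callback2 = PrintProgress;  // attach the callback
  int failed = api.Recognize(&monitor);        // 0 on success, as asserted above
  api.End();
  pixDestroy(&image);
  return failed;
}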

View File

@ -18,14 +18,13 @@ class TestableQRSequenceGenerator : public QRSequenceGenerator {
TEST(QRSequenceGenerator, GetBinaryReversedInteger) {
const int kRangeSize = 8;
TestableQRSequenceGenerator generator(kRangeSize);
int reversed_vals[kRangeSize] = { 0, 4, 2, 6, 1, 5, 3, 7};
int reversed_vals[kRangeSize] = {0, 4, 2, 6, 1, 5, 3, 7};
for (int i = 0; i < kRangeSize; ++i)
EXPECT_EQ(reversed_vals[i], generator.GetBinaryReversedInteger(i));
}
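The expected table in the test above is just each index with its three low-order bits reversed (8 values, hence 3 bits). A tiny illustrative sketch, not part of this commit, that reproduces the sequence {0, 4, 2, 6, 1, 5, 3, 7}:
#include <cstdio>
// Reverse the lowest num_bits bits of value, e.g. 3 (011) -> 110 -> 6.
static int ReverseBits(int value, int num_bits) {
  int reversed = 0;
  for (int b = 0; b < num_bits; ++b)
    reversed = (reversed << 1) | ((value >> b) & 1);
  return reversed;
}
int main() {
  for (int i = 0; i < 8; ++i) printf("%d ", ReverseBits(i, 3));
  printf("\n");  // prints: 0 4 2 6 1 5 3 7
  return 0;
}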
// Trivial test fixture for a parameterized test.
class QRSequenceGeneratorTest : public ::testing::TestWithParam<int> {
};
class QRSequenceGeneratorTest : public ::testing::TestWithParam<int> {};
TEST_P(QRSequenceGeneratorTest, GeneratesValidSequence) {
const int kRangeSize = GetParam();
@ -33,8 +32,7 @@ TEST_P(QRSequenceGeneratorTest, GeneratesValidSequence) {
std::vector<int> vals(kRangeSize);
CycleTimer timer;
timer.Restart();
for (int i = 0; i < kRangeSize; ++i)
vals[i] = generator.GetVal();
for (int i = 0; i < kRangeSize; ++i) vals[i] = generator.GetVal();
LOG(INFO) << kRangeSize << "-length sequence took " << timer.Get() * 1e3
<< "ms";
// Sort the numbers to verify that we've covered the range without repetition.

View File

@ -10,10 +10,10 @@
using tesseract::CCUtil;
using tesseract::Dict;
using tesseract::RecodedCharID;
using tesseract::RecodeBeamSearch;
using tesseract::RecodeNode;
using tesseract::PointerVector;
using tesseract::RecodeBeamSearch;
using tesseract::RecodedCharID;
using tesseract::RecodeNode;
using tesseract::TRand;
using tesseract::UnicharCompress;
@ -59,13 +59,11 @@ class RecodeBeamTest : public ::testing::Test {
// Loads and compresses the given unicharset.
void LoadUnicharset(const string& unicharset_name) {
string radical_stroke_file =
file::JoinPath(FLAGS_test_srcdir,
"tesseract/training"
"/langdata/radical-stroke.txt");
string unicharset_file = file::JoinPath(
FLAGS_test_srcdir, "testdata",
unicharset_name);
string radical_stroke_file = file::JoinPath(FLAGS_test_srcdir,
"tesseract/training"
"/langdata/radical-stroke.txt");
string unicharset_file =
file::JoinPath(FLAGS_test_srcdir, "testdata", unicharset_name);
string uni_data;
CHECK_OK(file::GetContents(unicharset_file, &uni_data, file::Defaults()));
string radical_data;
@ -94,10 +92,9 @@ class RecodeBeamTest : public ::testing::Test {
// Loads the dictionary.
void LoadDict(const string& lang) {
string traineddata_name = lang + ".traineddata";
string traineddata_file = file::JoinPath(
FLAGS_test_srcdir, "testdata",
traineddata_name);
lstm_dict_.SetupForLoad(NULL);
string traineddata_file =
file::JoinPath(FLAGS_test_srcdir, "testdata", traineddata_name);
lstm_dict_.SetupForLoad(nullptr);
tesseract::TessdataManager mgr;
mgr.Init(traineddata_file.c_str());
lstm_dict_.LoadLSTM(lang.c_str(), &mgr);
@ -113,7 +110,7 @@ class RecodeBeamTest : public ::testing::Test {
truth_utf8 += ccutil_.unicharset.id_to_unichar(transcription[i]);
}
PointerVector<WERD_RES> words;
ExpectCorrect(output, truth_utf8, NULL, &words);
ExpectCorrect(output, truth_utf8, nullptr, &words);
}
void ExpectCorrect(const GENERIC_2D_ARRAY<float>& output,
const string& truth_utf8, Dict* dict,
@ -140,8 +137,8 @@ class RecodeBeamTest : public ::testing::Test {
code.length() < RecodedCharID::kMaxCodeLen &&
(uni_id == INVALID_UNICHAR_ID ||
!recoder_.IsValidFirstCode(labels[index])));
EXPECT_NE(INVALID_UNICHAR_ID, uni_id) << "index=" << index << "/"
<< labels.size();
EXPECT_NE(INVALID_UNICHAR_ID, uni_id)
<< "index=" << index << "/" << labels.size();
// To the extent of truth_utf8, we expect decoded to match, but if
// transcription is shorter, that is OK too, as we may just be testing
// that we get a valid sequence when padded with random data.
@ -248,7 +245,7 @@ class RecodeBeamTest : public ::testing::Test {
int t = start_t;
GenericVector<int> unichar_ids;
EXPECT_TRUE(ccutil_.unicharset.encode_string(utf8_str, true, &unichar_ids,
NULL, NULL));
nullptr, nullptr));
if (unichar_ids.empty() || utf8_str[0] == '\0') {
unichar_ids.clear();
unichar_ids.push_back(unichar_null_char_);
@ -291,7 +288,7 @@ class RecodeBeamTest : public ::testing::Test {
const float scores2[],
TRand* random) {
int width = 0;
while (chars1[width] != NULL) ++width;
while (chars1[width] != nullptr) ++width;
int padding = width * RecodedCharID::kMaxCodeLen;
int num_codes = recoder_.code_range();
GENERIC_2D_ARRAY<float> outputs(width + padding, num_codes, 0.0f);
@ -407,9 +404,9 @@ TEST_F(RecodeBeamTest, EngDictionary) {
GENERIC_2D_ARRAY<float> outputs = GenerateSyntheticOutputs(
kGWRTops, kGWRTopScores, kGWR2nds, kGWR2ndScores, nullptr);
string default_str;
for (int i = 0; kGWRTops[i] != NULL; ++i) default_str += kGWRTops[i];
for (int i = 0; kGWRTops[i] != nullptr; ++i) default_str += kGWRTops[i];
PointerVector<WERD_RES> words;
ExpectCorrect(outputs, default_str, NULL, &words);
ExpectCorrect(outputs, default_str, nullptr, &words);
// Now try again with the dictionary.
LoadDict("eng_beam");
ExpectCorrect(outputs, "Gets words right.", &lstm_dict_, &words);
@ -421,7 +418,7 @@ TEST_F(RecodeBeamTest, ChiDictionary) {
GENERIC_2D_ARRAY<float> outputs = GenerateSyntheticOutputs(
kZHTops, kZHTopScores, kZH2nds, kZH2ndScores, nullptr);
PointerVector<WERD_RES> words;
ExpectCorrect(outputs, "实学储啬投学生", NULL, &words);
ExpectCorrect(outputs, "实学储啬投学生", nullptr, &words);
// Each is an individual word, with permuter = top choice.
EXPECT_EQ(7, words.size());
for (int w = 0; w < words.size(); ++w) {

View File

@ -16,12 +16,10 @@
namespace {
class TBOXTest : public testing::Test {
public:
void SetUp() {
}
public:
void SetUp() {}
void TearDown() {
}
void TearDown() {}
};
TEST_F(TBOXTest, OverlapInside) {
@ -56,10 +54,8 @@ TEST_F(TBOXTest, OverlapFractionCorners) {
mid.overlap_fraction(bottom_left));
EXPECT_DOUBLE_EQ((5.0 * 5.0) / (10.0 * 10.0),
bottom_left.overlap_fraction(mid));
EXPECT_DOUBLE_EQ((5.0 * 5.0) / (20.0 * 20.0),
mid.overlap_fraction(top_left));
EXPECT_DOUBLE_EQ((5.0 * 5.0) / (10.0 * 10.0),
top_left.overlap_fraction(mid));
EXPECT_DOUBLE_EQ((5.0 * 5.0) / (20.0 * 20.0), mid.overlap_fraction(top_left));
EXPECT_DOUBLE_EQ((5.0 * 5.0) / (10.0 * 10.0), top_left.overlap_fraction(mid));
}
TEST_F(TBOXTest, OverlapBoolSides) {
@ -175,4 +171,4 @@ TEST_F(TBOXTest, OverlapYFractionZeroSize) {
EXPECT_DOUBLE_EQ(0.0, small.y_overlap_fraction(zero));
}
} // namespace
} // namespace

View File

@ -12,18 +12,18 @@ DEFINE_string(tess_config, "", "config file for tesseract");
DEFINE_bool(visual_test, false, "Runs a visual test using scrollview");
using tesseract::PageIterator;
using tesseract::ResultIterator;
using tesseract::PageIteratorLevel;
using tesseract::ResultIterator;
// Helper functions for converting to STL vectors
template<typename T>
void ToVector(const GenericVector<T> &from, std::vector<T> *to) {
template <typename T>
void ToVector(const GenericVector<T>& from, std::vector<T>* to) {
to->clear();
for (int i = 0; i < from.size(); i++) to->push_back(from[i]);
}
template<typename T>
void ToVector(const GenericVectorEqEq<T> &from, std::vector<T> *to) {
template <typename T>
void ToVector(const GenericVectorEqEq<T>& from, std::vector<T>* to) {
to->clear();
for (int i = 0; i < from.size(); i++) to->push_back(from[i]);
}
@ -32,22 +32,17 @@ void ToVector(const GenericVectorEqEq<T> &from, std::vector<T> *to) {
class ResultIteratorTest : public testing::Test {
protected:
string TestDataNameToPath(const string& name) {
return file::JoinPath(FLAGS_test_srcdir,
"testdata/" + name);
return file::JoinPath(FLAGS_test_srcdir, "testdata/" + name);
}
string TessdataPath() {
return file::JoinPath(FLAGS_test_srcdir,
"tessdata");
return file::JoinPath(FLAGS_test_srcdir, "tessdata");
}
string OutputNameToPath(const string& name) {
return file::JoinPath(FLAGS_test_tmpdir, name);
}
ResultIteratorTest() {
src_pix_ = NULL;
}
~ResultIteratorTest() {
}
ResultIteratorTest() { src_pix_ = nullptr; }
~ResultIteratorTest() {}
void SetImage(const char* filename) {
src_pix_ = pixRead(TestDataNameToPath(filename).c_str());
@ -63,16 +58,14 @@ class ResultIteratorTest : public testing::Test {
// Rebuilds the image using the binary images at the given level, and
// EXPECTs that the number of pixels in the xor of the rebuilt image with
// the original is at most max_diff.
void VerifyRebuild(int max_diff,
PageIteratorLevel level, PageIterator* it) {
void VerifyRebuild(int max_diff, PageIteratorLevel level, PageIterator* it) {
it->Begin();
int width = pixGetWidth(src_pix_);
int height = pixGetHeight(src_pix_);
int depth = pixGetDepth(src_pix_);
Pix* pix = pixCreate(width, height, depth);
EXPECT_TRUE(depth == 1 || depth == 8);
if (depth == 8)
pixSetAll(pix);
if (depth == 8) pixSetAll(pix);
do {
int left, top, right, bottom;
PageIteratorLevel im_level = level;
@ -81,8 +74,8 @@ class ResultIteratorTest : public testing::Test {
im_level = tesseract::RIL_BLOCK;
EXPECT_TRUE(it->BoundingBox(im_level, &left, &top, &right, &bottom));
}
VLOG(1) << "BBox: [L:" << left << ", T:" << top
<< ", R:" << right << ", B:" << bottom << "]";
VLOG(1) << "BBox: [L:" << left << ", T:" << top << ", R:" << right
<< ", B:" << bottom << "]";
Pix* block_pix;
if (depth == 1) {
block_pix = it->GetBinaryImage(im_level);
@ -90,9 +83,9 @@ class ResultIteratorTest : public testing::Test {
PIX_SRC ^ PIX_DST, block_pix, 0, 0);
} else {
block_pix = it->GetImage(im_level, 2, src_pix_, &left, &top);
pixRasterop(pix, left, top,
pixGetWidth(block_pix), pixGetHeight(block_pix),
PIX_SRC & PIX_DST, block_pix, 0, 0);
pixRasterop(pix, left, top, pixGetWidth(block_pix),
pixGetHeight(block_pix), PIX_SRC & PIX_DST, block_pix, 0,
0);
}
CHECK(block_pix != nullptr);
pixDestroy(&block_pix);
@ -109,7 +102,7 @@ class ResultIteratorTest : public testing::Test {
if (base::GetFlag(FLAGS_v) >= 1)
pixWrite(OutputNameToPath("rebuiltxor.png").c_str(), pix, IFF_PNG);
l_int32 pixcount;
pixCountPixels(pix, &pixcount, NULL);
pixCountPixels(pix, &pixcount, nullptr);
if (pixcount > max_diff) {
string outfile = OutputNameToPath("failedxor.png");
VLOG(1) << "outfile = " << outfile;
@ -123,8 +116,7 @@ class ResultIteratorTest : public testing::Test {
// Rebuilds the text from the iterator strings at the given level, and
// EXPECTs that the rebuild string exactly matches the truth string.
void VerifyIteratorText(const string& truth,
PageIteratorLevel level,
void VerifyIteratorText(const string& truth, PageIteratorLevel level,
ResultIterator* it) {
VLOG(1) << "Text Test Level " << level;
it->Begin();
@ -132,7 +124,7 @@ class ResultIteratorTest : public testing::Test {
do {
char* text = it->GetUTF8Text(level);
result += text;
delete [] text;
delete[] text;
if ((level == tesseract::RIL_WORD || level == tesseract::RIL_SYMBOL) &&
it->IsAtFinalElement(tesseract::RIL_WORD, level)) {
if (it->IsAtFinalElement(tesseract::RIL_TEXTLINE, level)) {
@ -140,8 +132,7 @@ class ResultIteratorTest : public testing::Test {
} else {
result += ' ';
}
if (it->IsAtFinalElement(tesseract::RIL_PARA, level))
result += '\n';
if (it->IsAtFinalElement(tesseract::RIL_PARA, level)) result += '\n';
}
} while (it->Next(level));
EXPECT_STREQ(truth.c_str(), result.c_str())
@ -170,9 +161,10 @@ class ResultIteratorTest : public testing::Test {
// expected output reading order
// (expected_reading_order[num_reading_order_entries]) and a given reading
// context (ltr or rtl).
void ExpectTextlineReadingOrder(
bool in_ltr_context, StrongScriptDirection *word_dirs, int num_words,
int *expected_reading_order, int num_reading_order_entries) const {
void ExpectTextlineReadingOrder(bool in_ltr_context,
StrongScriptDirection* word_dirs,
int num_words, int* expected_reading_order,
int num_reading_order_entries) const {
GenericVector<StrongScriptDirection> gv_word_dirs;
for (int i = 0; i < num_words; i++) {
gv_word_dirs.push_back(word_dirs[i]);
@ -195,7 +187,7 @@ class ResultIteratorTest : public testing::Test {
// Sane means that the output contains some permutation of the indices
// 0..[num_words - 1] interspersed optionally with negative (marker) values.
void VerifySaneTextlineOrder(bool in_ltr_context,
StrongScriptDirection *word_dirs,
StrongScriptDirection* word_dirs,
int num_words) const {
GenericVector<StrongScriptDirection> gv_word_dirs;
for (int i = 0; i < num_words; i++) {
@ -235,29 +227,28 @@ class ResultIteratorTest : public testing::Test {
tesseract::TessBaseAPI api_;
};
// Tests layout analysis output (and scrollview) on the UNLV page numbered
// 8087_054.3G.tif. (Dubrovnik), but only if --visual_test is true.
TEST_F(ResultIteratorTest, VisualTest) {
if (!FLAGS_visual_test) return;
const char* kIms[] = {"8087_054.3G.tif", "8071_093.3B.tif", NULL};
for (int i = 0; kIms[i] != NULL; ++i) {
const char* kIms[] = {"8087_054.3G.tif", "8071_093.3B.tif", nullptr};
for (int i = 0; kIms[i] != nullptr; ++i) {
SetImage(kIms[i]);
// Just run layout analysis.
PageIterator* it = api_.AnalyseLayout();
EXPECT_FALSE(it == NULL);
EXPECT_FALSE(it == nullptr);
// Make a scrollview window for the display.
int width = pixGetWidth(src_pix_);
int height = pixGetHeight(src_pix_);
ScrollView* win = new ScrollView(kIms[i], 100, 100,
width / 2, height / 2, width, height);
ScrollView* win =
new ScrollView(kIms[i], 100, 100, width / 2, height / 2, width, height);
win->Image(src_pix_, 0, 0);
it->Begin();
ScrollView::Color color = ScrollView::RED;
win->Brush(ScrollView::NONE);
do {
Pta* pts = it->BlockPolygon();
if (pts != NULL) {
if (pts != nullptr) {
win->Pen(color);
int num_pts = ptaGetCount(pts);
l_float32 x, y;
@ -282,7 +273,7 @@ TEST_F(ResultIteratorTest, EasyTest) {
SetImage("phototest.tif");
// Just run layout analysis.
PageIterator* p_it = api_.AnalyseLayout();
EXPECT_FALSE(p_it == NULL);
EXPECT_FALSE(p_it == nullptr);
// Check iterator position.
EXPECT_TRUE(p_it->IsAtBeginningOf(tesseract::RIL_BLOCK));
// This should be a single block.
@ -296,7 +287,7 @@ TEST_F(ResultIteratorTest, EasyTest) {
char* result = api_.GetUTF8Text();
ocr_text_ = result;
delete [] result;
delete[] result;
ResultIterator* r_it = api_.GetIterator();
// The images should rebuild almost perfectly.
LOG(INFO) << "Verifying image rebuilds 2a (resultiterator)";
@ -330,15 +321,15 @@ TEST_F(ResultIteratorTest, EasyTest) {
do {
bool bold, italic, underlined, monospace, serif, smallcaps;
int pointsize, font_id;
const char* font = r_it->WordFontAttributes(&bold, &italic, &underlined,
&monospace, &serif, &smallcaps,
&pointsize, &font_id);
const char* font =
r_it->WordFontAttributes(&bold, &italic, &underlined, &monospace,
&serif, &smallcaps, &pointsize, &font_id);
float confidence = r_it->Confidence(tesseract::RIL_WORD);
EXPECT_GE(confidence, 80.0f);
char* word_str = r_it->GetUTF8Text(tesseract::RIL_WORD);
VLOG(1) << StringPrintf("Word %s in font %s, id %d, size %d, conf %g",
word_str, font, font_id, pointsize, confidence);
delete [] word_str;
delete[] word_str;
EXPECT_FALSE(bold);
EXPECT_FALSE(italic);
EXPECT_FALSE(underlined);
@ -358,7 +349,7 @@ TEST_F(ResultIteratorTest, ComplexTest) {
SetImage("8087_054.3B.tif");
// Just run layout analysis.
PageIterator* it = api_.AnalyseLayout();
EXPECT_FALSE(it == NULL);
EXPECT_FALSE(it == nullptr);
// The images should rebuild almost perfectly.
VerifyRebuilds(400, 400, 400, 400, 650, it);
delete it;
@ -369,7 +360,7 @@ TEST_F(ResultIteratorTest, GreyTest) {
SetImage("8087_054.3G.tif");
// Just run layout analysis.
PageIterator* it = api_.AnalyseLayout();
EXPECT_FALSE(it == NULL);
EXPECT_FALSE(it == nullptr);
// The images should rebuild almost perfectly.
VerifyRebuilds(600, 600, 600, 600, 600, it);
delete it;
@ -379,7 +370,7 @@ TEST_F(ResultIteratorTest, GreyTest) {
TEST_F(ResultIteratorTest, SmallCapDropCapTest) {
SetImage("8071_093.3B.tif");
char* result = api_.GetUTF8Text();
delete [] result;
delete[] result;
ResultIterator* r_it = api_.GetIterator();
// Iterate over the words.
int found_dropcaps = 0;
@ -388,26 +379,23 @@ TEST_F(ResultIteratorTest, SmallCapDropCapTest) {
do {
bool bold, italic, underlined, monospace, serif, smallcaps;
int pointsize, font_id;
r_it->WordFontAttributes(&bold, &italic, &underlined,
&monospace, &serif, &smallcaps,
&pointsize, &font_id);
r_it->WordFontAttributes(&bold, &italic, &underlined, &monospace, &serif,
&smallcaps, &pointsize, &font_id);
char* word_str = r_it->GetUTF8Text(tesseract::RIL_WORD);
if (word_str != NULL) {
VLOG(1) << StringPrintf("Word %s is %s",
word_str, smallcaps ? "Smallcaps" : "Normal");
if (word_str != nullptr) {
VLOG(1) << StringPrintf("Word %s is %s", word_str,
smallcaps ? "Smallcaps" : "Normal");
if (r_it->SymbolIsDropcap()) {
++found_dropcaps;
}
if (strcmp(word_str, "SHE") == 0 ||
strcmp(word_str, "MOPED") == 0 ||
if (strcmp(word_str, "SHE") == 0 || strcmp(word_str, "MOPED") == 0 ||
strcmp(word_str, "RALPH") == 0 ||
strcmp(word_str, "KINNEY") == 0 || // Not working yet.
strcmp(word_str, "BENNETT") == 0) {
EXPECT_TRUE(smallcaps) << word_str;
++found_smallcaps;
} else {
if (smallcaps)
++false_positives;
if (smallcaps) ++false_positives;
}
// No symbol other than the first of any word should be dropcap.
ResultIterator s_it(*r_it);
@ -415,13 +403,13 @@ TEST_F(ResultIteratorTest, SmallCapDropCapTest) {
!s_it.IsAtBeginningOf(tesseract::RIL_WORD)) {
if (s_it.SymbolIsDropcap()) {
char* sym_str = s_it.GetUTF8Text(tesseract::RIL_SYMBOL);
LOG(ERROR) << StringPrintf("Symbol %s of word %s is dropcap",
sym_str, word_str);
delete [] sym_str;
LOG(ERROR) << StringPrintf("Symbol %s of word %s is dropcap", sym_str,
word_str);
delete[] sym_str;
}
EXPECT_FALSE(s_it.SymbolIsDropcap());
}
delete [] word_str;
delete[] word_str;
}
} while (r_it->Next(tesseract::RIL_WORD));
delete r_it;
@ -454,12 +442,12 @@ TEST_F(ResultIteratorTest, SubSuperTest) {
++found_subs;
} else if (r_it->SymbolIsSuperscript()) {
result = r_it->GetUTF8Text(tesseract::RIL_SYMBOL);
if (strchr(kAllowedSupers, result[0]) == NULL) {
if (strchr(kAllowedSupers, result[0]) == nullptr) {
char* word = r_it->GetUTF8Text(tesseract::RIL_WORD);
LOG(ERROR) << StringPrintf("Char %s in word %s is unexpected super!",
result, word);
delete [] word;
EXPECT_TRUE(strchr(kAllowedSupers, result[0]) != NULL);
EXPECT_TRUE(strchr(kAllowedSupers, result[0]) != nullptr);
}
delete [] result;
++found_supers;
@ -486,12 +474,13 @@ static const StrongScriptDirection dZ = DIR_MIX;
// interpreted appropriately in different contexts.
TEST_F(ResultIteratorTest, DualStartTextlineOrderTest) {
StrongScriptDirection word_dirs[] = {dL, dL, dN, dL, dN, dR, dR, dR};
int reading_order_rtl_context[] = {
7, 6, 5, 4, ResultIterator::kMinorRunStart, 0, 1, 2, 3,
ResultIterator::kMinorRunEnd};
int reading_order_ltr_context[] = {
0, 1, 2, 3, 4, ResultIterator::kMinorRunStart, 7, 6, 5,
ResultIterator::kMinorRunEnd};
int reading_order_rtl_context[] = {7, 6, 5, 4, ResultIterator::kMinorRunStart,
0, 1, 2, 3, ResultIterator::kMinorRunEnd};
int reading_order_ltr_context[] = {0, 1,
2, 3,
4, ResultIterator::kMinorRunStart,
7, 6,
5, ResultIterator::kMinorRunEnd};
ExpectTextlineReadingOrder(true, word_dirs, ABSL_ARRAYSIZE(word_dirs),
reading_order_ltr_context,
@ -510,8 +499,8 @@ TEST_F(ResultIteratorTest, LeftwardTextlineOrderTest) {
// In the strange event that this shows up in an RTL paragraph, nonetheless
// just presume the whole thing is an LTR line.
int reading_order_rtl_context[] = {
ResultIterator::kMinorRunStart, 0, 1, 2, 3, 4, 5, 6, 7,
ResultIterator::kMinorRunEnd};
ResultIterator::kMinorRunStart, 0, 1, 2, 3, 4, 5, 6, 7,
ResultIterator::kMinorRunEnd};
ExpectTextlineReadingOrder(true, word_dirs, ABSL_ARRAYSIZE(word_dirs),
reading_order_ltr_context,
@ -553,28 +542,28 @@ TEST_F(ResultIteratorTest, TextlineOrderSanityCheck) {
TEST_F(ResultIteratorTest, NonNullChoicesTest) {
SetImage("5318c4b679264.jpg");
char* result = api_.GetUTF8Text();
delete [] result;
delete[] result;
ResultIterator* r_it = api_.GetIterator();
// Iterate over the words.
do {
char* word_str = r_it->GetUTF8Text(tesseract::RIL_WORD);
if (word_str != NULL) {
if (word_str != nullptr) {
VLOG(1) << StringPrintf("Word %s:", word_str);
ResultIterator s_it = *r_it;
do {
tesseract::ChoiceIterator c_it(s_it);
do {
const char* char_str = c_it.GetUTF8Text();
if (char_str == NULL)
if (char_str == nullptr)
VLOG(1) << "Null char choice";
else
VLOG(1) << "Char choice " << char_str;
CHECK(char_str != nullptr);
} while (c_it.Next());
} while (!s_it.IsAtFinalElement(tesseract::RIL_WORD,
tesseract::RIL_SYMBOL) &&
s_it.Next(tesseract::RIL_SYMBOL));
delete [] word_str;
} while (
!s_it.IsAtFinalElement(tesseract::RIL_WORD, tesseract::RIL_SYMBOL) &&
s_it.Next(tesseract::RIL_SYMBOL));
delete[] word_str;
}
} while (r_it->Next(tesseract::RIL_WORD));
delete r_it;
@ -586,12 +575,12 @@ TEST_F(ResultIteratorTest, NonNullConfidencesTest) {
// Force recognition so we can used the result iterator.
// We don't care about the return from GetUTF8Text.
char* result = api_.GetUTF8Text();
delete [] result;
delete[] result;
ResultIterator* r_it = api_.GetIterator();
// Iterate over the words.
do {
char* word_str = r_it->GetUTF8Text(tesseract::RIL_WORD);
if (word_str != NULL) {
if (word_str != nullptr) {
EXPECT_FALSE(r_it->Empty(tesseract::RIL_WORD));
EXPECT_FALSE(r_it->Empty(tesseract::RIL_SYMBOL));
ResultIterator s_it = *r_it;
@ -599,13 +588,13 @@ TEST_F(ResultIteratorTest, NonNullConfidencesTest) {
const char* char_str = s_it.GetUTF8Text(tesseract::RIL_SYMBOL);
CHECK(char_str != nullptr);
float confidence = s_it.Confidence(tesseract::RIL_SYMBOL);
VLOG(1) << StringPrintf("Char %s has confidence %g\n",
char_str, confidence);
delete [] char_str;
} while (!s_it.IsAtFinalElement(tesseract::RIL_WORD,
tesseract::RIL_SYMBOL) &&
s_it.Next(tesseract::RIL_SYMBOL));
delete [] word_str;
VLOG(1) << StringPrintf("Char %s has confidence %g\n", char_str,
confidence);
delete[] char_str;
} while (
!s_it.IsAtFinalElement(tesseract::RIL_WORD, tesseract::RIL_SYMBOL) &&
s_it.Next(tesseract::RIL_SYMBOL));
delete[] word_str;
} else {
VLOG(1) << "Empty word found";
}

View File

@ -7,8 +7,7 @@ namespace {
class ScanutilsTest : public ::testing::Test {
protected:
string TestDataNameToPath(const string& name) {
return file::JoinPath(FLAGS_test_srcdir,
"testdata/" + name);
return file::JoinPath(FLAGS_test_srcdir, "testdata/" + name);
}
};
@ -32,15 +31,13 @@ TEST_F(ScanutilsTest, DoesScanf) {
int r1 = fscanf(fp1, "%f %f %f %f", &f1[0], &f1[1], &f1[2], &f1[3]);
int r2 = tfscanf(fp2, "%f %f %f %f", &f2[0], &f2[1], &f2[2], &f2[3]);
EXPECT_EQ(r1, r2);
for (int i = 0; i < kNumFloats; ++i)
EXPECT_FLOAT_EQ(f1[i], f2[i]);
for (int i = 0; i < kNumFloats; ++i) EXPECT_FLOAT_EQ(f1[i], f2[i]);
const int kNumInts = 5;
int i1[kNumInts], i2[kNumInts];
r1 = fscanf(fp1, "%d %d %d %d %i", &i1[0], &i1[1], &i1[2], &i1[3], &i1[4]);
r2 = tfscanf(fp2, "%d %d %d %d %i", &i2[0], &i2[1], &i2[2], &i2[3], &i2[4]);
EXPECT_EQ(r1, r2);
for (int i = 0; i < kNumInts; ++i)
EXPECT_EQ(i1[i], i2[i]);
for (int i = 0; i < kNumInts; ++i) EXPECT_EQ(i1[i], i2[i]);
const int kStrLen = 1024;
char s1[kStrLen];
char s2[kStrLen];
@ -68,11 +65,10 @@ TEST_F(ScanutilsTest, DoesScanf) {
r1 = fscanf(fp1, "%f %f %f %f", &f1[0], &f1[1], &f1[2], &f1[3]);
r2 = tfscanf(fp2, "%f %f %f %f", &f2[0], &f2[1], &f2[2], &f2[3]);
EXPECT_EQ(r1, r2);
for (int i = 0; i < kNumFloats; ++i)
EXPECT_FLOAT_EQ(f1[i], f2[i]);
for (int i = 0; i < kNumFloats; ++i) EXPECT_FLOAT_EQ(f1[i], f2[i]);
// Test the * for field suppression.
r1 = fscanf(fp1, "%d %*s %*d %*f %*f", &i1[0]);
r2 = tfscanf(fp2,"%d %*s %*d %*f %*f", &i2[0]);
r2 = tfscanf(fp2, "%d %*s %*d %*f %*f", &i2[0]);
EXPECT_EQ(r1, r2);
EXPECT_EQ(i1[0], i2[0]);
// We should still see the next value and no phantoms.
@ -84,4 +80,3 @@ TEST_F(ScanutilsTest, DoesScanf) {
}
} // namespace

View File

@ -39,8 +39,7 @@ static void Expect352(int font_id, const Shape& shape) {
}
// The fixture for testing Shape.
class ShapeTest : public testing::Test {
};
class ShapeTest : public testing::Test {};
// Tests that a Shape works as expected for all the basic functions.
TEST_F(ShapeTest, BasicTest) {
@ -51,7 +50,7 @@ TEST_F(ShapeTest, BasicTest) {
// It should still work after file I/O.
string filename = TmpNameToPath("shapefile");
FILE* fp = fopen(filename.c_str(), "wb");
EXPECT_TRUE(fp != NULL);
EXPECT_TRUE(fp != nullptr);
EXPECT_TRUE(shape1.Serialize(fp));
fclose(fp);
TFile tfp;
@ -97,8 +96,7 @@ TEST_F(ShapeTest, AddShapeTest) {
}
// The fixture for testing Shape.
class ShapeTableTest : public testing::Test {
};
class ShapeTableTest : public testing::Test {};
// Tests that a Shape works as expected for all the basic functions.
TEST_F(ShapeTableTest, FullTest) {
@ -140,7 +138,7 @@ TEST_F(ShapeTableTest, FullTest) {
EXPECT_EQ(0, st.MasterDestinationIndex(1));
EXPECT_EQ(0, st.MasterDestinationIndex(2));
ShapeTable st2;
st2.AppendMasterShapes(st, NULL);
st2.AppendMasterShapes(st, nullptr);
EXPECT_EQ(1, st.NumMasterShapes());
EXPECT_EQ(1, st2.NumShapes());
EXPECT_TRUE(st2.MutableShape(0)->IsEqualUnichars(&shape1));
@ -148,5 +146,3 @@ TEST_F(ShapeTableTest, FullTest) {
}
} // namespace

View File

@ -9,15 +9,15 @@
// See the License for the specific language governing permissions and
// limitations under the License.
#include "statistc.h"
#include "genericvector.h"
#include "kdpair.h"
#include "statistc.h"
#include "include_gunit.h"
namespace {
const int kTestData[] = { 2, 0, 12, 1, 1, 2, 10, 1, 0, 0, 0, 2, 0, 4, 1, 1 };
const int kTestData[] = {2, 0, 12, 1, 1, 2, 10, 1, 0, 0, 0, 2, 0, 4, 1, 1};
class STATSTest : public testing::Test {
public:
@ -27,8 +27,7 @@ class STATSTest : public testing::Test {
stats_.add(i, kTestData[i]);
}
void TearDown() {
}
void TearDown() {}
STATS stats_;
};

View File

@ -1,9 +1,9 @@
#include "tesseract/lstm/stridemap.h"
using tesseract::FlexDimensions;
using tesseract::FD_BATCH;
using tesseract::FD_HEIGHT;
using tesseract::FD_WIDTH;
using tesseract::FlexDimensions;
using tesseract::StrideMap;
namespace {
@ -104,8 +104,8 @@ TEST_F(StridemapTest, Scaling) {
// Scale x by 2, keeping y the same.
std::vector<int> values_x2 = {0, 1, 4, 5, 8, 9, 12, 13, 17, 18,
22, 23, 27, 28, 32, 33, 36, 37, 40, 41,
44, 45, 48, 49, 53, 54, 58, 59};
22, 23, 27, 28, 32, 33, 36, 37, 40, 41,
44, 45, 48, 49, 53, 54, 58, 59};
StrideMap test_map(stride_map);
test_map.ScaleXY(2, 1);
StrideMap::Index index(test_map);
@ -121,8 +121,8 @@ TEST_F(StridemapTest, Scaling) {
test_map = stride_map;
// Scale y by 2, keeping x the same.
std::vector<int> values_y2 = {0, 1, 2, 3, 12, 13, 14, 15, 16,
17, 18, 19, 20, 21, 32, 33, 34, 35,
36, 37, 38, 39, 48, 49, 50, 51, 52};
17, 18, 19, 20, 21, 32, 33, 34, 35,
36, 37, 38, 39, 48, 49, 50, 51, 52};
test_map.ScaleXY(1, 2);
index.InitToFirst();
pos = 0;
@ -151,7 +151,7 @@ TEST_F(StridemapTest, Scaling) {
test_map = stride_map;
// Reduce Width to 1.
std::vector<int> values_x_to_1 = {0, 4, 8, 12, 17, 22, 27,
32, 36, 40, 44, 48, 53, 58};
32, 36, 40, 44, 48, 53, 58};
test_map.ReduceWidthTo1();
index.InitToFirst();
pos = 0;

View File

@ -23,9 +23,9 @@ namespace {
const char kEngText[] = "the quick brown fox jumps over the lazy dog";
const char kHinText[] = "पिताने विवाह की | हो गई उद्विग्न वह सोचा";
const char kKorText[] =
"이는 것으로 다시 넣을 1234 수는 있지만 선택의 의미는";
const char kArabicText[] = "والفكر والصراع ، بالتأمل والفهم والتحليل ، "
const char kKorText[] = "이는 것으로 다시 넣을 1234 수는 있지만 선택의 의미는";
const char kArabicText[] =
"والفكر والصراع ، بالتأمل والفهم والتحليل ، "
"بالعلم والفن ، وأخيرا بالضحك أوبالبكاء ، ";
const char kMixedText[] = "والفكر 123 والصراع abc";
@ -40,8 +40,7 @@ class StringRendererTest : public ::testing::Test {
protected:
static void SetUpTestCase() {
l_chooseDisplayProg(L_DISPLAY_WITH_XZGV);
FLAGS_fonts_dir = file::JoinPath(
FLAGS_test_srcdir, "testdata");
FLAGS_fonts_dir = file::JoinPath(FLAGS_test_srcdir, "testdata");
FLAGS_fontconfig_tmpdir = FLAGS_test_tmpdir;
FLAGS_use_only_legacy_fonts = false;
// Needed for reliable heapchecking of pango layout structures.
@ -66,10 +65,10 @@ class StringRendererTest : public ::testing::Test {
TEST_F(StringRendererTest, DoesRenderToImage) {
renderer_.reset(new StringRenderer("Verdana 10", 600, 600));
Pix *pix = NULL;
Pix* pix = nullptr;
EXPECT_EQ(strlen(kEngText),
renderer_->RenderToImage(kEngText, strlen(kEngText), &pix));
EXPECT_TRUE(pix != NULL);
EXPECT_TRUE(pix != nullptr);
EXPECT_GT(renderer_->GetBoxes().size(), 0);
DisplayClusterBoxes(pix);
pixDestroy(&pix);
@ -92,7 +91,7 @@ TEST_F(StringRendererTest, DoesRenderToImage) {
renderer_.reset(new StringRenderer("Arab 10", 600, 600));
EXPECT_EQ(strlen(kArabicText),
renderer_->RenderToImage(kArabicText, strlen(kArabicText), &pix));
EXPECT_TRUE(pix != NULL);
EXPECT_TRUE(pix != nullptr);
EXPECT_GT(renderer_->GetBoxes().size(), 0);
DisplayClusterBoxes(pix);
pixDestroy(&pix);
@ -101,7 +100,7 @@ TEST_F(StringRendererTest, DoesRenderToImage) {
renderer_.reset(new StringRenderer("Arab 10", 600, 600));
EXPECT_EQ(strlen(kMixedText),
renderer_->RenderToImage(kMixedText, strlen(kMixedText), &pix));
EXPECT_TRUE(pix != NULL);
EXPECT_TRUE(pix != nullptr);
EXPECT_GT(renderer_->GetBoxes().size(), 0);
DisplayClusterBoxes(pix);
pixDestroy(&pix);
@ -112,10 +111,10 @@ TEST_F(StringRendererTest, DoesRenderToImageWithUnderline) {
// Underline all words but NOT intervening spaces.
renderer_->set_underline_start_prob(1.0);
renderer_->set_underline_continuation_prob(0);
Pix *pix = NULL;
Pix* pix = nullptr;
EXPECT_EQ(strlen(kEngText),
renderer_->RenderToImage(kEngText, strlen(kEngText), &pix));
EXPECT_TRUE(pix != NULL);
EXPECT_TRUE(pix != nullptr);
EXPECT_GT(renderer_->GetBoxes().size(), 0);
DisplayClusterBoxes(pix);
pixDestroy(&pix);
@ -126,7 +125,7 @@ TEST_F(StringRendererTest, DoesRenderToImageWithUnderline) {
renderer_->set_underline_continuation_prob(1.0);
EXPECT_EQ(strlen(kEngText),
renderer_->RenderToImage(kEngText, strlen(kEngText), &pix));
EXPECT_TRUE(pix != NULL);
EXPECT_TRUE(pix != nullptr);
EXPECT_GT(renderer_->GetBoxes().size(), 0);
DisplayClusterBoxes(pix);
pixDestroy(&pix);
@ -137,7 +136,7 @@ TEST_F(StringRendererTest, DoesRenderToImageWithUnderline) {
renderer_->set_underline_continuation_prob(0.5);
EXPECT_EQ(strlen(kEngText),
renderer_->RenderToImage(kEngText, strlen(kEngText), &pix));
EXPECT_TRUE(pix != NULL);
EXPECT_TRUE(pix != nullptr);
EXPECT_GT(renderer_->GetBoxes().size(), 0);
DisplayClusterBoxes(pix);
pixDestroy(&pix);
@ -147,10 +146,10 @@ TEST_F(StringRendererTest, DoesHandleNewlineCharacters) {
const char kRawText[] = "\n\n\n A \nB \nC \n\n\n";
const char kStrippedText[] = " A B C "; // text with newline chars removed
renderer_.reset(new StringRenderer("Verdana 10", 600, 600));
Pix *pix = NULL;
Pix* pix = nullptr;
EXPECT_EQ(strlen(kRawText),
renderer_->RenderToImage(kRawText, strlen(kRawText), &pix));
EXPECT_TRUE(pix != NULL);
EXPECT_TRUE(pix != nullptr);
// 3 characters + 4 spaces => 7 boxes
EXPECT_EQ(7, renderer_->GetBoxes().size());
// Verify the text content of the boxchars
@ -166,15 +165,15 @@ TEST_F(StringRendererTest, DoesRenderLigatures) {
renderer_.reset(new StringRenderer("Arab 12", 600, 250));
const char kArabicLigature[] = "لا";
Pix* pix = NULL;
EXPECT_EQ(strlen(kArabicLigature),
renderer_->RenderToImage(kArabicLigature, strlen(kArabicLigature),
&pix));
EXPECT_TRUE(pix != NULL);
Pix* pix = nullptr;
EXPECT_EQ(
strlen(kArabicLigature),
renderer_->RenderToImage(kArabicLigature, strlen(kArabicLigature), &pix));
EXPECT_TRUE(pix != nullptr);
EXPECT_GT(renderer_->GetBoxes().size(), 0);
const std::vector<BoxChar*>& boxes = renderer_->GetBoxes();
EXPECT_EQ(1, boxes.size());
EXPECT_TRUE(boxes[0]->box() != NULL);
EXPECT_TRUE(boxes[0]->box() != nullptr);
EXPECT_STREQ(kArabicLigature, boxes[0]->ch().c_str());
DisplayClusterBoxes(pix);
pixDestroy(&pix);
@ -186,19 +185,17 @@ TEST_F(StringRendererTest, DoesRenderLigatures) {
pixDestroy(&pix);
}
static int FindBoxCharXCoord(const std::vector<BoxChar*>& boxchars,
const string& ch) {
for (int i = 0; i < boxchars.size(); ++i) {
if (boxchars[i]->ch() == ch)
return boxchars[i]->box()->x;
if (boxchars[i]->ch() == ch) return boxchars[i]->box()->x;
}
return kint32max;
}
TEST_F(StringRendererTest, ArabicBoxcharsInLTROrder) {
renderer_.reset(new StringRenderer("Arab 10", 600, 600));
Pix* pix = NULL;
Pix* pix = nullptr;
// Arabic letters should be in decreasing x-coordinates
const char kArabicWord[] = "\u0644\u0627\u0641\u0643\u0631";
const string kRevWord = "\u0631\u0643\u0641\u0627\u0644";
@ -223,14 +220,14 @@ TEST_F(StringRendererTest, ArabicBoxcharsInLTROrder) {
TEST_F(StringRendererTest, DoesOutputBoxcharsInReadingOrder) {
renderer_.reset(new StringRenderer("Arab 10", 600, 600));
Pix *pix = NULL;
Pix* pix = nullptr;
// Arabic letters should be in decreasing x-coordinates
const char kArabicWord[] = "والفكر";
renderer_->RenderToImage(kArabicWord, strlen(kArabicWord), &pix);
EXPECT_GT(renderer_->GetBoxes().size(), 0);
const std::vector<BoxChar*>& boxchars = renderer_->GetBoxes();
for (int i = 0; i < boxchars.size() - 1; ++i) {
EXPECT_GT(boxchars[i]->box()->x, boxchars[i+1]->box()->x)
EXPECT_GT(boxchars[i]->box()->x, boxchars[i + 1]->box()->x)
<< boxchars[i]->ch();
}
pixDestroy(&pix);
@ -241,7 +238,7 @@ TEST_F(StringRendererTest, DoesOutputBoxcharsInReadingOrder) {
renderer_->RenderToImage(kEnglishWord, strlen(kEnglishWord), &pix);
EXPECT_EQ(boxchars.size(), strlen(kEnglishWord));
for (int i = 0; i < boxchars.size() - 1; ++i) {
EXPECT_LT(boxchars[i]->box()->x, boxchars[i+1]->box()->x)
EXPECT_LT(boxchars[i]->box()->x, boxchars[i + 1]->box()->x)
<< boxchars[i]->ch();
}
pixDestroy(&pix);
@ -255,9 +252,8 @@ TEST_F(StringRendererTest, DoesOutputBoxcharsInReadingOrder) {
pixDestroy(&pix);
}
TEST_F(StringRendererTest, DoesRenderVerticalText) {
Pix* pix = NULL;
Pix* pix = nullptr;
renderer_.reset(new StringRenderer("UnBatang 10", 600, 600));
renderer_->set_vertical_text(true);
EXPECT_EQ(strlen(kKorText),
@ -271,21 +267,22 @@ TEST_F(StringRendererTest, DoesRenderVerticalText) {
// appropriate page numbers.
TEST_F(StringRendererTest, DoesKeepAllImageBoxes) {
renderer_.reset(new StringRenderer("Verdana 10", 600, 600));
Pix *pix = NULL;
Pix* pix = nullptr;
int num_boxes_per_page = 0;
const int kNumTrials = 2;
for (int i = 0; i < kNumTrials; ++i) {
EXPECT_EQ(strlen(kEngText),
renderer_->RenderToImage(kEngText, strlen(kEngText), &pix));
EXPECT_TRUE(pix != NULL);
EXPECT_TRUE(pix != nullptr);
pixDestroy(&pix);
EXPECT_GT(renderer_->GetBoxes().size(), 0);
if (!num_boxes_per_page) {
num_boxes_per_page = renderer_->GetBoxes().size();
} else {
EXPECT_EQ((i+1) * num_boxes_per_page, renderer_->GetBoxes().size());
EXPECT_EQ((i + 1) * num_boxes_per_page, renderer_->GetBoxes().size());
}
for (int j = i * num_boxes_per_page; j < (i+1) * num_boxes_per_page; ++j) {
for (int j = i * num_boxes_per_page; j < (i + 1) * num_boxes_per_page;
++j) {
EXPECT_EQ(i, renderer_->GetBoxes()[j]->page());
}
}
@ -293,7 +290,7 @@ TEST_F(StringRendererTest, DoesKeepAllImageBoxes) {
TEST_F(StringRendererTest, DoesClearBoxes) {
renderer_.reset(new StringRenderer("Verdana 10", 600, 600));
Pix *pix = NULL;
Pix* pix = nullptr;
EXPECT_EQ(strlen(kEngText),
renderer_->RenderToImage(kEngText, strlen(kEngText), &pix));
pixDestroy(&pix);
@ -310,7 +307,7 @@ TEST_F(StringRendererTest, DoesClearBoxes) {
TEST_F(StringRendererTest, DoesLigatureTextForRendering) {
renderer_.reset(new StringRenderer("Verdana 10", 600, 600));
renderer_->set_add_ligatures(true);
Pix *pix = NULL;
Pix* pix = nullptr;
EXPECT_EQ(strlen(kEngNonLigatureText),
renderer_->RenderToImage(kEngNonLigatureText,
strlen(kEngNonLigatureText), &pix));
@ -323,7 +320,7 @@ TEST_F(StringRendererTest, DoesLigatureTextForRendering) {
TEST_F(StringRendererTest, DoesRetainInputLigatureForRendering) {
renderer_.reset(new StringRenderer("Verdana 10", 600, 600));
Pix *pix = NULL;
Pix* pix = nullptr;
EXPECT_EQ(strlen(kEngLigatureText),
renderer_->RenderToImage(kEngLigatureText, strlen(kEngLigatureText),
&pix));
@ -346,7 +343,7 @@ TEST_F(StringRendererTest, DoesStripUnrenderableWords) {
TEST_F(StringRendererTest, DoesRenderWordBoxes) {
renderer_.reset(new StringRenderer("Verdana 10", 600, 600));
renderer_->set_output_word_boxes(true);
Pix *pix = NULL;
Pix* pix = nullptr;
EXPECT_EQ(strlen(kEngText),
renderer_->RenderToImage(kEngText, strlen(kEngText), &pix));
pixDestroy(&pix);
@ -361,7 +358,7 @@ TEST_F(StringRendererTest, DoesRenderWordBoxes) {
EXPECT_EQ(words[i / 2], boxchars[i]->ch());
if (i < boxchars.size() - 1) {
EXPECT_EQ(" ", boxchars[i + 1]->ch());
EXPECT_TRUE(boxchars[i + 1]->box() == NULL);
EXPECT_TRUE(boxchars[i + 1]->box() == nullptr);
}
}
}
@ -369,7 +366,7 @@ TEST_F(StringRendererTest, DoesRenderWordBoxes) {
TEST_F(StringRendererTest, DoesRenderWordBoxesFromMultiLineText) {
renderer_.reset(new StringRenderer("Verdana 10", 600, 600));
renderer_->set_output_word_boxes(true);
Pix *pix = NULL;
Pix* pix = nullptr;
const char kMultlineText[] = "the quick brown fox\njumps over the lazy dog";
EXPECT_EQ(strlen(kMultlineText),
renderer_->RenderToImage(kMultlineText, strlen(kEngText), &pix));
@ -386,7 +383,7 @@ TEST_F(StringRendererTest, DoesRenderWordBoxesFromMultiLineText) {
EXPECT_EQ(words[i / 2], boxchars[i]->ch());
if (i < boxchars.size() - 1) {
EXPECT_EQ(" ", boxchars[i + 1]->ch());
EXPECT_TRUE(boxchars[i + 1]->box() == NULL);
EXPECT_TRUE(boxchars[i + 1]->box() == nullptr);
}
}
}
@ -396,13 +393,12 @@ TEST_F(StringRendererTest, DoesRenderAllFontsToImage) {
int offset = 0;
string font_used;
do {
Pix* pix = NULL;
Pix* pix = nullptr;
font_used.clear();
offset += renderer_->RenderAllFontsToImage(1.0, kEngText + offset,
strlen(kEngText + offset),
&font_used, &pix);
offset += renderer_->RenderAllFontsToImage(
1.0, kEngText + offset, strlen(kEngText + offset), &font_used, &pix);
if (offset < strlen(kEngText)) {
EXPECT_TRUE(pix != NULL);
EXPECT_TRUE(pix != nullptr);
EXPECT_STRNE("", font_used.c_str());
}
if (FLAGS_display) pixDisplay(pix, 0, 0);
@ -414,7 +410,7 @@ TEST_F(StringRendererTest, DoesNotRenderWordJoiner) {
renderer_.reset(new StringRenderer("Verdana 10", 500, 200));
const string word = "A- -B C-D A BC";
const string joined_word = StringRenderer::InsertWordJoiners(word);
Pix* pix = NULL;
Pix* pix = nullptr;
renderer_->RenderToImage(joined_word.c_str(), joined_word.length(), &pix);
pixDestroy(&pix);
const std::vector<BoxChar*>& boxchars = renderer_->GetBoxes();
@ -431,8 +427,9 @@ TEST_F(StringRendererTest, DoesDropUncoveredChars) {
renderer_->set_drop_uncovered_chars(true);
const string kWord = "office";
const string kCleanWord = "oice";
Pix* pix = NULL;
EXPECT_FALSE(renderer_->font().CanRenderString(kWord.c_str(), kWord.length()));
Pix* pix = nullptr;
EXPECT_FALSE(
renderer_->font().CanRenderString(kWord.c_str(), kWord.length()));
EXPECT_FALSE(renderer_->font().CoversUTF8Text(kWord.c_str(), kWord.length()));
int offset = renderer_->RenderToImage(kWord.c_str(), kWord.length(), &pix);
pixDestroy(&pix);

View File

@ -25,22 +25,22 @@ namespace {
class TestableTableFinder : public tesseract::TableFinder {
public:
using TableFinder::set_global_median_xheight;
using TableFinder::set_global_median_blob_width;
using TableFinder::set_global_median_ledding;
using TableFinder::GapInXProjection;
using TableFinder::HasLeaderAdjacent;
using TableFinder::InsertLeaderPartition;
using TableFinder::InsertTextPartition;
using TableFinder::set_global_median_blob_width;
using TableFinder::set_global_median_ledding;
using TableFinder::set_global_median_xheight;
using TableFinder::SplitAndInsertFragmentedTextPartition;
using TableFinder::HasLeaderAdjacent;
void ExpectPartition(const TBOX& box) {
tesseract::ColPartitionGridSearch gsearch(&fragmented_text_grid_);
gsearch.SetUniqueMode(true);
gsearch.StartFullSearch();
ColPartition* part = NULL;
ColPartition* part = nullptr;
bool found = false;
while ((part = gsearch.NextFullSearch()) != NULL) {
while ((part = gsearch.NextFullSearch()) != nullptr) {
if (part->bounding_box().left() == box.left() &&
part->bounding_box().bottom() == box.bottom() &&
part->bounding_box().right() == box.right() &&
@ -54,9 +54,9 @@ class TestableTableFinder : public tesseract::TableFinder {
tesseract::ColPartitionGridSearch gsearch(&fragmented_text_grid_);
gsearch.SetUniqueMode(true);
gsearch.StartFullSearch();
ColPartition* part = NULL;
ColPartition* part = nullptr;
int count = 0;
while ((part = gsearch.NextFullSearch()) != NULL) {
while ((part = gsearch.NextFullSearch()) != nullptr) {
++count;
}
EXPECT_EQ(expected_count, count);
@ -75,10 +75,9 @@ class TableFinderTest : public testing::Test {
}
void TearDown() {
if (partition_.get() != NULL)
partition_->DeleteBoxes();
if (partition_.get() != nullptr) partition_->DeleteBoxes();
DeletePartitionListBoxes();
finder_.reset(NULL);
finder_.reset(nullptr);
}
void MakePartition(int x_min, int y_min, int x_max, int y_max) {
@ -87,12 +86,11 @@ class TableFinderTest : public testing::Test {
void MakePartition(int x_min, int y_min, int x_max, int y_max,
int first_column, int last_column) {
if (partition_.get() != NULL)
partition_->DeleteBoxes();
if (partition_.get() != nullptr) partition_->DeleteBoxes();
TBOX box;
box.set_to_given_coords(x_min, y_min, x_max, y_max);
partition_.reset(ColPartition::FakePartition(box, PT_UNKNOWN,
BRT_UNKNOWN, BTFT_NONE));
partition_.reset(
ColPartition::FakePartition(box, PT_UNKNOWN, BRT_UNKNOWN, BTFT_NONE));
partition_->set_first_column(first_column);
partition_->set_last_column(last_column);
}
@@ -119,8 +117,7 @@ class TableFinderTest : public testing::Test {
}
void DeletePartitionListBoxes() {
for (free_boxes_it_.mark_cycle_pt();
!free_boxes_it_.cycled_list();
for (free_boxes_it_.mark_cycle_pt(); !free_boxes_it_.cycled_list();
free_boxes_it_.forward()) {
ColPartition* part = free_boxes_it_.data();
part->DeleteBoxes();
@@ -137,30 +134,23 @@ class TableFinderTest : public testing::Test {
TEST_F(TableFinderTest, GapInXProjectionNoGap) {
int data[100];
for (int i = 0; i < 100; ++i)
data[i] = 10;
for (int i = 0; i < 100; ++i) data[i] = 10;
EXPECT_FALSE(finder_->GapInXProjection(data, 100));
}
TEST_F(TableFinderTest, GapInXProjectionEdgeGap) {
int data[100];
for (int i = 0; i < 10; ++i)
data[i] = 2;
for (int i = 10; i < 90; ++i)
data[i] = 10;
for (int i = 90; i < 100; ++i)
data[i] = 2;
for (int i = 0; i < 10; ++i) data[i] = 2;
for (int i = 10; i < 90; ++i) data[i] = 10;
for (int i = 90; i < 100; ++i) data[i] = 2;
EXPECT_FALSE(finder_->GapInXProjection(data, 100));
}
TEST_F(TableFinderTest, GapInXProjectionExists) {
int data[100];
for (int i = 0; i < 10; ++i)
data[i] = 10;
for (int i = 10; i < 90; ++i)
data[i] = 2;
for (int i = 90; i < 100; ++i)
data[i] = 10;
for (int i = 0; i < 10; ++i) data[i] = 10;
for (int i = 10; i < 90; ++i) data[i] = 2;
for (int i = 90; i < 100; ++i) data[i] = 10;
EXPECT_TRUE(finder_->GapInXProjection(data, 100));
}
@@ -216,18 +206,18 @@ TEST_F(TableFinderTest, SplitAndInsertFragmentedPartitionsBasicPass) {
all->set_right_margin(100);
TBOX blob_box = part_box;
for (int i = 10; i <= 20; i += 5) {
blob_box.set_left(i+1);
blob_box.set_right(i+4);
blob_box.set_left(i + 1);
blob_box.set_right(i + 4);
all->AddBox(new BLOBNBOX(C_BLOB::FakeBlob(blob_box)));
}
for (int i = 35; i <= 55; i += 5) {
blob_box.set_left(i+1);
blob_box.set_right(i+4);
blob_box.set_left(i + 1);
blob_box.set_right(i + 4);
all->AddBox(new BLOBNBOX(C_BLOB::FakeBlob(blob_box)));
}
for (int i = 80; i <= 95; i += 5) {
blob_box.set_left(i+1);
blob_box.set_right(i+4);
blob_box.set_left(i + 1);
blob_box.set_right(i + 4);
all->AddBox(new BLOBNBOX(C_BLOB::FakeBlob(blob_box)));
}
// TODO(nbeato): Ray's newer code...
@@ -256,8 +246,8 @@ TEST_F(TableFinderTest, SplitAndInsertFragmentedPartitionsBasicFail) {
all->set_right_margin(100);
TBOX blob_box = part_box;
for (int i = 10; i <= 95; i += 5) {
blob_box.set_left(i+1);
blob_box.set_right(i+4);
blob_box.set_left(i + 1);
blob_box.set_right(i + 4);
all->AddBox(new BLOBNBOX(C_BLOB::FakeBlob(blob_box)));
}
// TODO(nbeato): Ray's newer code...


@@ -34,8 +34,8 @@ class TestableTableRecognizer : public tesseract::TableRecognizer {
class TestableStructuredTable : public tesseract::StructuredTable {
public:
using StructuredTable::CountVerticalIntersections;
using StructuredTable::CountHorizontalIntersections;
using StructuredTable::CountVerticalIntersections;
using StructuredTable::FindLinedStructure;
using StructuredTable::FindWhitespacedColumns;
using StructuredTable::FindWhitespacedStructure;
@@ -51,11 +51,11 @@ class TestableStructuredTable : public tesseract::StructuredTable {
}
void ExpectCellX(int x_min, int second, int add, int almost_done, int x_max) {
ASSERT_EQ(0, (almost_done - second) % add);
ASSERT_EQ(0, (almost_done - second) % add);
EXPECT_EQ(3 + (almost_done - second) / add, cell_x_.length());
EXPECT_EQ(x_min, cell_x_.get(0));
EXPECT_EQ(x_max, cell_x_.get(cell_x_.length() - 1));
for (int i = 1; i < cell_x_.length() - 1; ++i) {
for (int i = 1; i < cell_x_.length() - 1; ++i) {
EXPECT_EQ(second + add * (i - 1), cell_x_.get(i));
}
}
@@ -63,7 +63,7 @@ class TestableStructuredTable : public tesseract::StructuredTable {
void ExpectSortedX() {
EXPECT_GT(cell_x_.length(), 0);
for (int i = 1; i < cell_x_.length(); ++i) {
EXPECT_LT(cell_x_.get(i-1), cell_x_.get(i));
EXPECT_LT(cell_x_.get(i - 1), cell_x_.get(i));
}
}
};
@@ -92,8 +92,8 @@ class SharedTest : public testing::Test {
void InsertPartition(int left, int bottom, int right, int top) {
TBOX box(left, bottom, right, top);
ColPartition* part = ColPartition::FakePartition(box, PT_FLOWING_TEXT,
BRT_TEXT, BTFT_NONE);
ColPartition* part =
ColPartition::FakePartition(box, PT_FLOWING_TEXT, BRT_TEXT, BTFT_NONE);
part->set_median_width(3);
part->set_median_height(3);
text_grid_->InsertBBox(true, true, part);
@@ -103,34 +103,30 @@ class SharedTest : public testing::Test {
}
void InsertLines() {
line_box_.set_to_given_coords(100 - line_grid_->gridsize(),
10 - line_grid_->gridsize(),
450 + line_grid_->gridsize(),
50 + line_grid_->gridsize());
for (int i = 10; i <= 50; i += 10)
InsertHorizontalLine(100, 450, i);
for (int i = 100; i <= 450; i += 50)
InsertVerticalLine(i, 10, 50);
line_box_.set_to_given_coords(
100 - line_grid_->gridsize(), 10 - line_grid_->gridsize(),
450 + line_grid_->gridsize(), 50 + line_grid_->gridsize());
for (int i = 10; i <= 50; i += 10) InsertHorizontalLine(100, 450, i);
for (int i = 100; i <= 450; i += 50) InsertVerticalLine(i, 10, 50);
for (int i = 100; i <= 200; i += 20)
InsertHorizontalLine(0, 100, i);
for (int i = 100; i <= 200; i += 20) InsertHorizontalLine(0, 100, i);
}
void InsertHorizontalLine(int left, int right, int y) {
TBOX box(left, y - line_grid_->gridsize(),
right, y + line_grid_->gridsize());
ColPartition* part = ColPartition::FakePartition(box, PT_HORZ_LINE,
BRT_HLINE, BTFT_NONE);
TBOX box(left, y - line_grid_->gridsize(), right,
y + line_grid_->gridsize());
ColPartition* part =
ColPartition::FakePartition(box, PT_HORZ_LINE, BRT_HLINE, BTFT_NONE);
line_grid_->InsertBBox(true, true, part);
tesseract::ColPartition_IT add_it(&allocated_parts_);
add_it.add_after_stay_put(part);
}
void InsertVerticalLine(int x, int bottom, int top) {
TBOX box(x - line_grid_->gridsize(), bottom,
x + line_grid_->gridsize(), top);
ColPartition* part = ColPartition::FakePartition(box, PT_VERT_LINE,
BRT_VLINE, BTFT_NONE);
TBOX box(x - line_grid_->gridsize(), bottom, x + line_grid_->gridsize(),
top);
ColPartition* part =
ColPartition::FakePartition(box, PT_VERT_LINE, BRT_VLINE, BTFT_NONE);
line_grid_->InsertBBox(true, true, part);
tesseract::ColPartition_IT add_it(&allocated_parts_);
@@ -273,10 +269,8 @@ TEST_F(StructuredTableTest, CountHorizontalIntersectionsAll) {
}
TEST_F(StructuredTableTest, VerifyLinedTableBasicPass) {
for (int y = 10; y <= 50; y += 10)
table_->InjectCellY(y);
for (int x = 100; x <= 450; x += 50)
table_->InjectCellX(x);
for (int y = 10; y <= 50; y += 10) table_->InjectCellY(y);
for (int x = 100; x <= 450; x += 50) table_->InjectCellX(x);
InsertLines();
InsertCellsInLines();
table_->set_bounding_box(line_box_);
@@ -284,10 +278,8 @@ TEST_F(StructuredTableTest, VerifyLinedTableBasicPass) {
}
TEST_F(StructuredTableTest, VerifyLinedTableHorizontalFail) {
for (int y = 10; y <= 50; y += 10)
table_->InjectCellY(y);
for (int x = 100; x <= 450; x += 50)
table_->InjectCellX(x);
for (int y = 10; y <= 50; y += 10) table_->InjectCellY(y);
for (int x = 100; x <= 450; x += 50) table_->InjectCellX(x);
InsertLines();
InsertCellsInLines();
InsertPartition(101, 11, 299, 19);
@@ -296,10 +288,8 @@ TEST_F(StructuredTableTest, VerifyLinedTableHorizontalFail) {
}
TEST_F(StructuredTableTest, VerifyLinedTableVerticalFail) {
for (int y = 10; y <= 50; y += 10)
table_->InjectCellY(y);
for (int x = 100; x <= 450; x += 50)
table_->InjectCellX(x);
for (int y = 10; y <= 50; y += 10) table_->InjectCellY(y);
for (int x = 100; x <= 450; x += 50) table_->InjectCellX(x);
InsertLines();
InsertCellsInLines();
InsertPartition(151, 21, 199, 39);


@@ -21,12 +21,9 @@ namespace {
class TabVectorTest : public testing::Test {
protected:
void SetUp() {
vector_.reset();
}
void SetUp() { vector_.reset(); }
void TearDown() {
}
void TearDown() {}
void MakeSimpleTabVector(int x1, int y1, int x2, int y2) {
vector_.reset(new TabVector());
@@ -60,7 +57,7 @@ TEST_F(TabVectorTest, XAtY45DegreeSlopeInRangeExact) {
}
TEST_F(TabVectorTest, XAtYVerticalInRangeExact) {
const int x = 120; // Arbitrary choice
const int x = 120; // Arbitrary choice
MakeSimpleTabVector(x, 0, x, 100);
for (int y = 0; y <= 100; ++y) {
int result_x = vector_->XAtY(y);
@@ -69,7 +66,7 @@ TEST_F(TabVectorTest, XAtYVerticalInRangeExact) {
}
TEST_F(TabVectorTest, XAtYHorizontal) {
const int y = 76; // arbitrary
const int y = 76; // arbitrary
MakeSimpleTabVector(0, y, 100, y);
EXPECT_EQ(0, vector_->XAtY(y));
// TODO(nbeato): What's the failure condition?
@@ -93,13 +90,13 @@ TEST_F(TabVectorTest, XAtYLargeNumbers) {
// Assume a document is 800 DPI,
// the width of a page is 10 inches across (8000 pixels), and
// the height of the page is 15 inches (12000 pixels).
MakeSimpleTabVector(7804, 504, 7968, 11768); // Arbitrary for vertical line
int x = vector_->XAtY(6136); // test mid point
MakeSimpleTabVector(7804, 504, 7968, 11768); // Arbitrary for vertical line
int x = vector_->XAtY(6136); // test mid point
EXPECT_EQ(7886, x);
}
TEST_F(TabVectorTest, XAtYHorizontalInRangeExact) {
const int y = 120; // Arbitrary choice
const int y = 120; // Arbitrary choice
MakeSimpleTabVector(50, y, 150, y);
int x = vector_->XAtY(y);
@@ -129,4 +126,4 @@ TEST_F(TabVectorTest, XYFlip) {
EXPECT_EQ(3, vector_->endpt().y());
}
} // namespace
} // namespace


@@ -25,8 +25,7 @@ class TatweelTest : public ::testing::Test {
}
string TestDataNameToPath(const string& name) {
return file::JoinPath(FLAGS_test_srcdir,
"testdata/" + name);
return file::JoinPath(FLAGS_test_srcdir, "testdata/" + name);
}
UNICHARSET unicharset_;
};


@@ -20,29 +20,26 @@ using tesseract::TextlineProjection;
// NOTE: Keep in sync with textlineprojection.cc.
const int kMinStrongTextValue = 6;
// The fixture for testing Tesseract.
class TextlineProjectionTest : public testing::Test {
protected:
string TestDataNameToPath(const string& name) {
return file::JoinPath(FLAGS_test_srcdir,
"testdata/" + name);
return file::JoinPath(FLAGS_test_srcdir, "testdata/" + name);
}
string TessdataPath() {
return file::JoinPath(FLAGS_test_srcdir,
"tessdata");
return file::JoinPath(FLAGS_test_srcdir, "tessdata");
}
string OutputNameToPath(const string& name) {
return file::JoinPath(FLAGS_test_tmpdir, name);
}
TextlineProjectionTest() {
src_pix_ = NULL;
bin_pix_ = NULL;
tesseract_ = NULL;
finder_ = NULL;
denorm_ = NULL;
projection_ = NULL;
src_pix_ = nullptr;
bin_pix_ = nullptr;
tesseract_ = nullptr;
finder_ = nullptr;
denorm_ = nullptr;
projection_ = nullptr;
}
virtual ~TextlineProjectionTest() {
pixDestroy(&src_pix_);
@@ -70,14 +67,14 @@ class TextlineProjectionTest : public testing::Test {
tesseract::TessdataManager mgr;
Tesseract* osd_tess = new Tesseract;
OSResults osr;
EXPECT_EQ(osd_tess->init_tesseract(TessdataPath().c_str(), NULL, "osd",
tesseract::OEM_TESSERACT_ONLY, NULL, 0,
NULL, NULL, false, &mgr),
EXPECT_EQ(osd_tess->init_tesseract(TessdataPath().c_str(), nullptr, "osd",
tesseract::OEM_TESSERACT_ONLY, nullptr, 0,
nullptr, nullptr, false, &mgr),
0);
tesseract_ = new Tesseract;
EXPECT_EQ(tesseract_->init_tesseract(TessdataPath().c_str(), NULL, "eng",
tesseract::OEM_TESSERACT_ONLY, NULL, 0,
NULL, NULL, false, &mgr),
EXPECT_EQ(tesseract_->init_tesseract(TessdataPath().c_str(), nullptr, "eng",
tesseract::OEM_TESSERACT_ONLY, nullptr, 0,
nullptr, nullptr, false, &mgr),
0);
bin_pix_ = api_.GetThresholdedImage();
*tesseract_->mutable_pix_binary() = pixClone(bin_pix_);
@@ -91,13 +88,13 @@ class TextlineProjectionTest : public testing::Test {
BLOCK_LIST src_blocks;
BLOCK_IT block_it(&src_blocks);
block_it.add_to_end(block);
Pix* photomask_pix = NULL;
Pix* photomask_pix = nullptr;
// The blocks made by the ColumnFinder. Moved to blocks before return.
BLOCK_LIST found_blocks;
TO_BLOCK_LIST temp_blocks;
finder_ = tesseract_->SetupPageSegAndDetectOrientation(
tesseract::PSM_AUTO_OSD, &src_blocks, osd_tess, &osr, &temp_blocks,
&photomask_pix, NULL);
&photomask_pix, nullptr);
TO_BLOCK_IT to_block_it(&temp_blocks);
TO_BLOCK* to_block = to_block_it.data();
denorm_ = finder_->denorm();
@@ -118,16 +115,15 @@ class TextlineProjectionTest : public testing::Test {
const char* text, const char* message) {
int value = projection_->EvaluateBox(box, denorm_, false);
if (greater_or_equal != (value > target_value)) {
LOG(INFO)
<< StringPrintf("EvaluateBox too %s:%d vs %d for %s word '%s' at:",
greater_or_equal ? "low" : "high", value,
target_value,
message, text);
LOG(INFO) << StringPrintf(
"EvaluateBox too %s:%d vs %d for %s word '%s' at:",
greater_or_equal ? "low" : "high", value, target_value, message,
text);
box.print();
value = projection_->EvaluateBox(box, denorm_, true);
} else {
VLOG(1) << StringPrintf("EvaluateBox OK(%d) for %s word '%s'",
value, message, text);
VLOG(1) << StringPrintf("EvaluateBox OK(%d) for %s word '%s'", value,
message, text);
}
if (greater_or_equal) {
EXPECT_GE(value, target_value);
@@ -139,12 +135,12 @@ class TextlineProjectionTest : public testing::Test {
// Helper evaluates the DistanceOfBoxFromBox function by expecting that
// box should be nearer to true_box than false_box.
void EvaluateDistance(const TBOX& box, const TBOX& true_box,
const TBOX& false_box,
const char* text, const char* message) {
int true_dist = projection_->DistanceOfBoxFromBox(box, true_box, true,
denorm_, false);
int false_dist = projection_->DistanceOfBoxFromBox(box, false_box, true,
denorm_, false);
const TBOX& false_box, const char* text,
const char* message) {
int true_dist =
projection_->DistanceOfBoxFromBox(box, true_box, true, denorm_, false);
int false_dist =
projection_->DistanceOfBoxFromBox(box, false_box, true, denorm_, false);
if (false_dist <= true_dist) {
LOG(INFO) << StringPrintf("Distance wrong:%d vs %d for %s word '%s' at:",
false_dist, true_dist, message, text);
@@ -161,7 +157,7 @@ class TextlineProjectionTest : public testing::Test {
// line_height is the cap + descender size of the text.
void VerifyBoxes(const char* imagefile, int line_height) {
SetImage(imagefile);
api_.Recognize(NULL);
api_.Recognize(nullptr);
SetupProjection();
MutableIterator* it = api_.GetMutableIterator();
do {
@@ -194,8 +190,7 @@ class TextlineProjectionTest : public testing::Test {
TBOX lower_box = word_box;
lower_box.set_top(word_box.bottom());
lower_box.set_bottom(word_box.bottom() - padding);
if (tall_word)
lower_box.move(ICOORD(0, padding / 2));
if (tall_word) lower_box.move(ICOORD(0, padding / 2));
EvaluateBox(lower_box, false, kMinStrongTextValue, text, "Lower Word");
EvaluateBox(lower_box, true, -1, text, "Lower Word not vertical");
@@ -224,20 +219,19 @@ class TextlineProjectionTest : public testing::Test {
TBOX upper_challenger(upper_box);
upper_challenger.set_bottom(upper_box.top());
upper_challenger.set_top(upper_box.top() + word_box.height());
EvaluateDistance(upper_box, target_box, upper_challenger,
text, "Upper Word");
if (tall_word)
lower_box.move(ICOORD(0, padding / 2));
EvaluateDistance(upper_box, target_box, upper_challenger, text,
"Upper Word");
if (tall_word) lower_box.move(ICOORD(0, padding / 2));
lower_box.set_bottom(lower_box.top() - padding);
target_box = word_box;
target_box.set_bottom(lower_box.top());
TBOX lower_challenger(lower_box);
lower_challenger.set_top(lower_box.bottom());
lower_challenger.set_bottom(lower_box.bottom() - word_box.height());
EvaluateDistance(lower_box, target_box, lower_challenger,
text, "Lower Word");
EvaluateDistance(lower_box, target_box, lower_challenger, text,
"Lower Word");
delete [] text;
delete[] text;
} while (it->Next(tesseract::RIL_WORD));
delete it;
}
@@ -254,13 +248,9 @@ class TextlineProjectionTest : public testing::Test {
};
// Tests all word boxes on an unrotated image.
TEST_F(TextlineProjectionTest, Unrotated) {
VerifyBoxes("phototest.tif", 31);
}
TEST_F(TextlineProjectionTest, Unrotated) { VerifyBoxes("phototest.tif", 31); }
// Tests character-level applyboxes on italic Times New Roman.
TEST_F(TextlineProjectionTest, Rotated) {
VerifyBoxes("phototestrot.tif", 31);
}
TEST_F(TextlineProjectionTest, Rotated) { VerifyBoxes("phototestrot.tif", 31); }
} // namespace


@@ -23,8 +23,7 @@ namespace {
class TfileTest : public ::testing::Test {
protected:
TfileTest() {
}
TfileTest() {}
// Some data to serialize.
class MathData {
@@ -32,11 +31,9 @@ class TfileTest : public ::testing::Test {
MathData() : num_squares_(0), num_triangles_(0) {}
void Setup() {
// Setup some data.
for (int s = 0; s < 42; ++s)
squares_.push_back(s * s);
for (int s = 0; s < 42; ++s) squares_.push_back(s * s);
num_squares_ = squares_.size();
for (int t = 0; t < 52; ++t)
triangles_.push_back(t * (t + 1) / 2);
for (int t = 0; t < 52; ++t) triangles_.push_back(t * (t + 1) / 2);
num_triangles_ = triangles_.size();
}
void ExpectEq(const MathData& other) {
@@ -52,7 +49,7 @@ class TfileTest : public ::testing::Test {
if (fp->FWrite(&num_squares_, sizeof(num_squares_), 1) != 1) return false;
if (!squares_.Serialize(fp)) return false;
if (fp->FWrite(&num_triangles_, sizeof(num_triangles_), 1) != 1)
return false;
return false;
if (!triangles_.Serialize(fp)) return false;
return true;
}


@@ -10,8 +10,8 @@
// limitations under the License.
#include "unicharcompress.h"
#include "gunit.h"
#include "serialis.h"
#include "printf.h"
#include "serialis.h"
namespace tesseract {
namespace {
@@ -21,11 +21,9 @@ class UnicharcompressTest : public ::testing::Test {
// Loads and compresses the given unicharset.
void LoadUnicharset(const string& unicharset_name) {
string radical_stroke_file =
file::JoinPath(FLAGS_test_srcdir,
"langdata/radical-stroke.txt");
string unicharset_file = file::JoinPath(
FLAGS_test_srcdir, "testdata",
unicharset_name);
file::JoinPath(FLAGS_test_srcdir, "langdata/radical-stroke.txt");
string unicharset_file =
file::JoinPath(FLAGS_test_srcdir, "testdata", unicharset_name);
string uni_data;
CHECK_OK(file::GetContents(unicharset_file, &uni_data, file::Defaults()));
string radical_data;


@@ -128,9 +128,8 @@ TEST(UnicharsetTest, MultibyteBigrams) {
TEST(UnicharsetTest, OldStyle) {
// This test verifies an old unicharset that contains fi/fl ligatures loads
// and keeps all the entries.
string filename = file::JoinPath(FLAGS_test_srcdir,
"testdata",
"eng.unicharset");
string filename =
file::JoinPath(FLAGS_test_srcdir, "testdata", "eng.unicharset");
UNICHARSET u;
LOG(INFO) << "Filename=" << filename;
EXPECT_TRUE(u.load_from_file(filename.c_str()));


@@ -11,8 +11,8 @@
#include "validator.h"
#include "gmock/gmock.h" // for testing::ElementsAreArray
#include "include_gunit.h"
#include "gmock/gmock.h" // for testing::ElementsAreArray
namespace tesseract {
namespace {