// (C) Copyright 2017, Google Inc.
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef TESSERACT_UNITTEST_LSTM_TEST_H_
#define TESSERACT_UNITTEST_LSTM_TEST_H_

#include <memory>
#include <string>
#include <utility>

#include "include_gunit.h"

#include "absl/strings/str_cat.h"

#include "functions.h"
#include "helpers.h"
#include "lang_model_helpers.h"
#include "log.h"  // for LOG
#include "lstmtrainer.h"
#include "tprintf.h"
#include "unicharset.h"
|
|
namespace tesseract {
|
|
|
|
|
|
|
|
#if DEBUG_DETAIL == 0
|
|
|
|
// Number of iterations to run all the trainers.
|
|
|
|
const int kTrainerIterations = 600;
|
|
|
|
// Number of iterations between accuracy checks.
|
|
|
|
const int kBatchIterations = 100;
|
|
|
|
#else
|
|
|
|
// Number of iterations to run all the trainers.
|
|
|
|
const int kTrainerIterations = 2;
|
|
|
|
// Number of iterations between accuracy checks.
|
|
|
|
const int kBatchIterations = 1;
|
|
|
|
#endif
|
|
|
|
|
|
|
|
// The fixture for testing LSTMTrainer.
|
|
|
|
class LSTMTrainerTest : public testing::Test {
|
|
|
|
protected:
|
|
|
|
LSTMTrainerTest() {}
|
2019-01-24 15:01:19 +08:00
|
|
|
std::string TestDataNameToPath(const std::string& name) {
|
|
|
|
return file::JoinPath(TESTDATA_DIR,
|
|
|
|
"" + name);
|
2018-10-18 22:39:35 +08:00
|
|
|
}
|
2019-01-25 22:05:57 +08:00
|
|
|
std::string TessDataNameToPath(const std::string& name) {
|
|
|
|
return file::JoinPath(TESSDATA_DIR,
|
|
|
|
"" + name);
|
|
|
|
}
|
|
|
|
std::string TestingNameToPath(const std::string& name) {
|
|
|
|
return file::JoinPath(TESTING_DIR,
|
|
|
|
"" + name);
|
|
|
|
}
|
2019-02-19 20:53:31 +08:00
|
|
|
|
2019-01-24 15:01:19 +08:00
|
|
|
void SetupTrainerEng(const std::string& network_spec, const std::string& model_name,
|
2018-10-18 22:39:35 +08:00
|
|
|
bool recode, bool adam) {
|
2019-01-24 15:01:19 +08:00
|
|
|
SetupTrainer(network_spec, model_name, "eng/eng.unicharset",
|
2019-01-25 22:05:57 +08:00
|
|
|
"eng.Arial.exp0.lstmf", recode, adam, 5e-4, false, "eng");
|
2018-10-18 22:39:35 +08:00
|
|
|
}
|
2019-01-24 15:01:19 +08:00
|
|
|
void SetupTrainer(const std::string& network_spec, const std::string& model_name,
|
|
|
|
const std::string& unicharset_file, const std::string& lstmf_file,
|
2018-10-18 22:39:35 +08:00
|
|
|
bool recode, bool adam, double learning_rate,
|
2019-01-25 22:05:57 +08:00
|
|
|
bool layer_specific, const std::string& kLang) {
|
|
|
|
// constexpr char kLang[] = "eng"; // Exact value doesn't matter.
|
2019-01-24 15:01:19 +08:00
|
|
|
std::string unicharset_name = TestDataNameToPath(unicharset_file);
|
2018-10-18 22:39:35 +08:00
|
|
|
UNICHARSET unicharset;
|
|
|
|
ASSERT_TRUE(unicharset.load_from_file(unicharset_name.c_str(), false));
|
2019-01-24 15:01:19 +08:00
|
|
|
std::string script_dir = file::JoinPath(
|
|
|
|
LANGDATA_DIR, "");
|
2018-10-18 22:39:35 +08:00
|
|
|
GenericVector<STRING> words;
|
|
|
|
EXPECT_EQ(0, CombineLangModel(unicharset, script_dir, "", FLAGS_test_tmpdir,
|
|
|
|
kLang, !recode, words, words, words, false,
|
|
|
|
nullptr, nullptr));
|
2019-01-24 15:01:19 +08:00
|
|
|
std::string model_path = file::JoinPath(FLAGS_test_tmpdir, model_name);
|
|
|
|
std::string checkpoint_path = model_path + "_checkpoint";
|
2018-10-18 22:39:35 +08:00
|
|
|
trainer_.reset(new LSTMTrainer(nullptr, nullptr, nullptr, nullptr,
|
|
|
|
model_path.c_str(), checkpoint_path.c_str(),
|
|
|
|
0, 0));
|
|
|
|
trainer_->InitCharSet(file::JoinPath(FLAGS_test_tmpdir, kLang,
|
2019-01-25 22:05:57 +08:00
|
|
|
absl::StrCat(kLang, ".traineddata")));
|
2018-10-18 22:39:35 +08:00
|
|
|
int net_mode = adam ? NF_ADAM : 0;
|
|
|
|
// Adam needs a higher learning rate, due to not multiplying the effective
|
|
|
|
// rate by 1/(1-momentum).
|
|
|
|
if (adam) learning_rate *= 20.0;
|
|
|
|
if (layer_specific) net_mode |= NF_LAYER_SPECIFIC_LR;
|
|
|
|
EXPECT_TRUE(trainer_->InitNetwork(network_spec.c_str(), -1, net_mode, 0.1,
|
|
|
|
learning_rate, 0.9, 0.999));
|
|
|
|
GenericVector<STRING> filenames;
|
|
|
|
filenames.push_back(STRING(TestDataNameToPath(lstmf_file).c_str()));
|
|
|
|
EXPECT_TRUE(trainer_->LoadAllTrainingData(filenames, CS_SEQUENTIAL, false));
|
2019-01-24 19:40:53 +08:00
|
|
|
LOG(INFO) << "Setup network:" << model_name << "\n" ;
|
2018-10-18 22:39:35 +08:00
|
|
|
}
|
|
|
|
// Trains for a given number of iterations and returns the char error rate.
|
|
|
|
double TrainIterations(int max_iterations) {
|
|
|
|
int iteration = trainer_->training_iteration();
|
|
|
|
int iteration_limit = iteration + max_iterations;
|
|
|
|
double best_error = 100.0;
|
|
|
|
do {
|
|
|
|
STRING log_str;
|
|
|
|
int target_iteration = iteration + kBatchIterations;
|
|
|
|
// Train a few.
|
|
|
|
double mean_error = 0.0;
|
|
|
|
while (iteration < target_iteration && iteration < iteration_limit) {
|
|
|
|
trainer_->TrainOnLine(trainer_.get(), false);
|
|
|
|
iteration = trainer_->training_iteration();
|
|
|
|
mean_error += trainer_->LastSingleError(ET_CHAR_ERROR);
|
|
|
|
}
|
2019-01-02 05:40:06 +08:00
|
|
|
trainer_->MaintainCheckpoints(nullptr, &log_str);
|
2018-10-18 22:39:35 +08:00
|
|
|
iteration = trainer_->training_iteration();
|
|
|
|
mean_error *= 100.0 / kBatchIterations;
|
|
|
|
LOG(INFO) << log_str.string();
|
2019-01-24 19:40:53 +08:00
|
|
|
LOG(INFO) << "Best error = " << best_error << "\n" ;
|
|
|
|
LOG(INFO) << "Mean error = " << mean_error << "\n" ;
|
2018-10-18 22:39:35 +08:00
|
|
|
if (mean_error < best_error) best_error = mean_error;
|
|
|
|
} while (iteration < iteration_limit);
|
2019-01-24 15:01:19 +08:00
|
|
|
LOG(INFO) << "Trainer error rate = " << best_error << "\n";
|
2018-10-18 22:39:35 +08:00
|
|
|
return best_error;
|
|
|
|
}
|
|
|
|
// Tests for a given number of iterations and returns the char error rate.
|
|
|
|
double TestIterations(int max_iterations) {
|
|
|
|
CHECK_GT(max_iterations, 0);
|
|
|
|
int iteration = trainer_->sample_iteration();
|
|
|
|
double mean_error = 0.0;
|
|
|
|
int error_count = 0;
|
|
|
|
while (error_count < max_iterations) {
|
|
|
|
const ImageData& trainingdata =
|
|
|
|
*trainer_->mutable_training_data()->GetPageBySerial(iteration);
|
|
|
|
NetworkIO fwd_outputs, targets;
|
|
|
|
if (trainer_->PrepareForBackward(&trainingdata, &fwd_outputs, &targets) !=
|
|
|
|
UNENCODABLE) {
|
|
|
|
mean_error += trainer_->NewSingleError(ET_CHAR_ERROR);
|
|
|
|
++error_count;
|
|
|
|
}
|
|
|
|
trainer_->SetIteration(++iteration);
|
|
|
|
}
|
|
|
|
mean_error *= 100.0 / max_iterations;
|
2019-01-24 15:01:19 +08:00
|
|
|
LOG(INFO) << "Tester error rate = " << mean_error << "\n" ;
|
2018-10-18 22:39:35 +08:00
|
|
|
return mean_error;
|
|
|
|
}
|
|
|
|
// Tests that the current trainer_ can be converted to int mode and still gets
|
|
|
|
// within 1% of the error rate. Returns the increase in error from float to
|
|
|
|
// int.
|
|
|
|
double TestIntMode(int test_iterations) {
|
|
|
|
GenericVector<char> trainer_data;
|
|
|
|
EXPECT_TRUE(trainer_->SaveTrainingDump(NO_BEST_TRAINER, trainer_.get(),
|
|
|
|
&trainer_data));
|
|
|
|
// Get the error on the next few iterations in float mode.
|
|
|
|
double float_err = TestIterations(test_iterations);
|
|
|
|
// Restore the dump, convert to int and test error on that.
|
|
|
|
EXPECT_TRUE(trainer_->ReadTrainingDump(trainer_data, trainer_.get()));
|
|
|
|
trainer_->ConvertToInt();
|
|
|
|
double int_err = TestIterations(test_iterations);
|
|
|
|
EXPECT_LT(int_err, float_err + 1.0);
|
|
|
|
return int_err - float_err;
|
|
|
|
}
|
|
|
|
// Sets up a trainer with the given language and given recode+ctc condition.
|
|
|
|
// It then verifies that the given str encodes and decodes back to the same
|
|
|
|
// string.
|
2019-01-24 15:01:19 +08:00
|
|
|
void TestEncodeDecode(const std::string& lang, const std::string& str, bool recode) {
|
|
|
|
std::string unicharset_name = lang + "/" + lang + ".unicharset";
|
2019-01-25 22:05:57 +08:00
|
|
|
std::string lstmf_name = lang + ".Arial_Unicode_MS.exp0.lstmf";
|
2018-10-18 22:39:35 +08:00
|
|
|
SetupTrainer("[1,1,0,32 Lbx100 O1c1]", "bidi-lstm", unicharset_name,
|
2019-01-25 22:05:57 +08:00
|
|
|
lstmf_name, recode, true, 5e-4, true, lang);
|
2018-10-18 22:39:35 +08:00
|
|
|
GenericVector<int> labels;
|
|
|
|
EXPECT_TRUE(trainer_->EncodeString(str.c_str(), &labels));
|
|
|
|
STRING decoded = trainer_->DecodeLabels(labels);
|
2019-01-24 15:01:19 +08:00
|
|
|
std::string decoded_str(&decoded[0], decoded.length());
|
2018-10-18 22:39:35 +08:00
|
|
|
EXPECT_EQ(str, decoded_str);
|
|
|
|
}
|
|
|
|
// Calls TestEncodeDeode with both recode on and off.
|
2019-01-24 15:01:19 +08:00
|
|
|
void TestEncodeDecodeBoth(const std::string& lang, const std::string& str) {
|
2018-10-18 22:39:35 +08:00
|
|
|
TestEncodeDecode(lang, str, false);
|
|
|
|
TestEncodeDecode(lang, str, true);
|
|
|
|
}
|
|
|
|
|
|
|
|
std::unique_ptr<LSTMTrainer> trainer_;
|
|
|
|
};
|
|
|
|
|
|
|
|
} // namespace tesseract.
#endif  // TESSERACT_UNITTEST_LSTM_TEST_H_