// Copyright 2008 Google Inc.
// All Rights Reserved.
// Author: ahmadab@google.com (Ahmad Abdulkader)
//
// neuron.cpp: The implementation of a class for an object
// that represents a single neuron in a neural network
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "neuron.h"
|
|
#include "input_file_buffer.h"
|
|
|
|
namespace tesseract {

// Instantiate all supported templates
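// (ReadBinary is a member template, presumably parameterized on the buffer
// type; the explicit instantiation below makes the InputFileBuffer variant
// available to other translation units.)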
template bool Neuron::ReadBinary(InputFileBuffer *input_buffer);

// default and only constructor
Neuron::Neuron() {
  Init();
}

// virtual destructor
Neuron::~Neuron() {
}

// Initializer
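// (Note: the constructor delegates to Init(), so a freshly constructed Neuron
// starts with id -1, no fan-in connections, zero bias, activation and output,
// and node type Unknown.)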
void Neuron::Init() {
  id_ = -1;
  frwd_dirty_ = false;
  fan_in_.clear();
  fan_in_weights_.clear();
  activation_ = 0.0f;
  output_ = 0.0f;
  bias_ = 0.0f;
  node_type_ = Unknown;
}

// Computes the activation and output of the neuron if not fresh
// by pulling the outputs of all fan-in neurons
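// In effect (for non-input nodes) this computes
//   output_ = Sigmoid(sum_i((*fan_in_weights_[i]) * fan_in_[i]->output_) - bias_)
// over all connections added via AddFromConnection().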
void Neuron::FeedForward() {
  if (!frwd_dirty_) {
    return;
  }
  // nothing to do for input nodes: just pass the input to the output
  // otherwise, pull the output of all fan-in neurons
  if (node_type_ != Input) {
    int fan_in_cnt = fan_in_.size();
    // sum out the activation
    activation_ = -bias_;
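    // Recursively refresh any stale fan-in neuron before reading its output.
    // Since frwd_dirty_ is cleared once a neuron has been computed, a neuron
    // feeding several consumers is evaluated only once per forward pass.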
    for (int in = 0; in < fan_in_cnt; in++) {
      if (fan_in_[in]->frwd_dirty_) {
        fan_in_[in]->FeedForward();
      }
      activation_ += ((*(fan_in_weights_[in])) * fan_in_[in]->output_);
    }
    // sigmoid it
    output_ = Sigmoid(activation_);
  }
  frwd_dirty_ = false;
}

// set the type of the neuron
void Neuron::set_node_type(NeuronTypes Type) {
  node_type_ = Type;
}

// Adds new connections *to* this neuron *from* an array of source neurons
// using the specified params.
// Note that what is actually copied in this function are pointers to the
// specified Neurons and weights and not the actual values. This is by
// design, to centralize the allocation of neurons and weights and so
// increase the locality of reference and improve cache hits, resulting
// in a faster net. This technique resulted in a 2X-10X speedup
// (depending on network size and processor).
void Neuron::AddFromConnection(Neuron *neurons,
                               float *wts_offset,
                               int from_cnt) {
  for (int in = 0; in < from_cnt; in++) {
    fan_in_.push_back(neurons + in);
    fan_in_weights_.push_back(wts_offset + in);
  }
}

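// Example (hypothetical, for illustration only): wiring this neuron to a
// previous layer whose neurons and weights live in caller-owned arrays:
//
//   Neuron *prev_layer = ...;  // from_cnt source neurons, allocated elsewhere
//   float *weights = ...;      // from_cnt matching weights, allocated elsewhere
//   neuron.AddFromConnection(prev_layer, weights, from_cnt);
//
// Because only pointers are stored, both arrays must outlive this Neuron.
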
// fast computation of sigmoid function using a lookup table
// defined in sigmoid_table.cpp
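// The index 100 * (activation + 10) lies in [0, 2000) for activations in
// (-10, 10); e.g. activation = 0.0f maps to entry 1000. (The table is
// presumably sampled at a resolution of 0.01 over [-10, 10); see
// sigmoid_table.cpp.)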
float Neuron::Sigmoid(float activation) {
  if (activation <= -10.0f) {
    return 0.0f;
  } else if (activation >= 10.0f) {
    return 1.0f;
  } else {
    return kSigmoidTable[static_cast<int>(100 * (activation + 10.0))];
  }
}

}  // namespace tesseract