tesseract  4.00.00dev
trainingsample.cpp
Go to the documentation of this file.
1 // Copyright 2010 Google Inc. All Rights Reserved.
2 // Author: rays@google.com (Ray Smith)
3 //
4 // Licensed under the Apache License, Version 2.0 (the "License");
5 // you may not use this file except in compliance with the License.
6 // You may obtain a copy of the License at
7 // http://www.apache.org/licenses/LICENSE-2.0
8 // Unless required by applicable law or agreed to in writing, software
9 // distributed under the License is distributed on an "AS IS" BASIS,
10 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11 // See the License for the specific language governing permissions and
12 // limitations under the License.
13 //
15 
16 // Include automatically generated configuration file if running autoconf.
17 #ifdef HAVE_CONFIG_H
18 #include "config_auto.h"
19 #endif
20 
21 #include "trainingsample.h"
22 
23 #include <math.h>
24 #include "allheaders.h"
25 #include "helpers.h"
26 #include "intfeaturemap.h"
27 #include "normfeat.h"
28 #include "shapetable.h"
29 
30 namespace tesseract {
31 
32 ELISTIZE(TrainingSample)
33 
34 // Center of randomizing operations.
35 const int kRandomizingCenter = 128;
36 
37 // Randomizing factors.
// Vertical shift offsets (in pixels) used by RandomizedCopy; selected by
// index / kSampleScaleSize.
38 const int TrainingSample::kYShiftValues[kSampleYShiftSize] = {
39  6, 3, -3, -6, 0
40 };
// Scale factors used by RandomizedCopy; selected by index % kSampleScaleSize.
41 const double TrainingSample::kScaleValues[kSampleScaleSize] = {
42  1.0625, 0.9375, 1.0
43 };
44 
// NOTE(review): the destructor's signature line (original line 45,
// TrainingSample::~TrainingSample()) is missing from this extract; only its
// body survives. It releases the two heap-owned feature arrays.
46  delete [] features_;
47  delete [] micro_features_;
48 }
49 
50 // WARNING! Serialize/DeSerialize do not save/restore the "cache" data
51 // members, which is mostly the mapped features, and the weight.
52 // It is assumed these can all be reconstructed from what is saved.
53 // Writes to the given file. Returns false in case of error.
// Binary format, written in native endianness with no inter-field padding:
//   class_id_, font_id_, page_num_, bounding_box_, num_features_,
//   num_micro_features_, outline_length_, the features_ array, the
//   micro_features_ array, the CN params, then the Geo params.
// DeSerialize() must read the fields back in exactly this order.
54 bool TrainingSample::Serialize(FILE* fp) const {
55  if (fwrite(&class_id_, sizeof(class_id_), 1, fp) != 1) return false;
56  if (fwrite(&font_id_, sizeof(font_id_), 1, fp) != 1) return false;
57  if (fwrite(&page_num_, sizeof(page_num_), 1, fp) != 1) return false;
58  if (!bounding_box_.Serialize(fp)) return false;
59  if (fwrite(&num_features_, sizeof(num_features_), 1, fp) != 1) return false;
60  if (fwrite(&num_micro_features_, sizeof(num_micro_features_), 1, fp) != 1)
61  return false;
62  if (fwrite(&outline_length_, sizeof(outline_length_), 1, fp) != 1)
63  return false;
// The static_cast silences the signed/unsigned comparison: fwrite returns
// size_t while the feature counts are signed.
64  if (static_cast<int>(fwrite(features_, sizeof(*features_), num_features_, fp))
65  != num_features_)
66  return false;
67  if (static_cast<int>(fwrite(micro_features_, sizeof(*micro_features_),
68  num_micro_features_,
69  fp)) != num_micro_features_)
70  return false;
71  if (fwrite(cn_feature_, sizeof(*cn_feature_), kNumCNParams, fp) !=
72  kNumCNParams) return false;
73  if (fwrite(geo_feature_, sizeof(*geo_feature_), GeoCount, fp) != GeoCount)
74  return false;
75  return true;
76 }
77 
78 // Creates from the given file. Returns NULL in case of error.
79 // If swap is true, assumes a big/little-endian swap is needed.
// NOTE(review): the first two lines of DeSerializeCreate() (original lines
// 80-81: the signature and the "new TrainingSample" allocation) are missing
// from this extract; the fragment below is its tail.
82  if (sample->DeSerialize(swap, fp)) return sample;
// Ownership: the freshly allocated sample is destroyed on a failed read, so
// no memory leaks on the error path.
83  delete sample;
84  return NULL;
85 }
86 
87 // Reads from the given file. Returns false in case of error.
88 // If swap is true, assumes a big/little-endian swap is needed.
// Reads the fields in exactly the order Serialize() wrote them. Cache members
// (mapped features, weight) are not restored -- see the WARNING above
// Serialize().
89 bool TrainingSample::DeSerialize(bool swap, FILE* fp) {
90  if (fread(&class_id_, sizeof(class_id_), 1, fp) != 1) return false;
91  if (fread(&font_id_, sizeof(font_id_), 1, fp) != 1) return false;
92  if (fread(&page_num_, sizeof(page_num_), 1, fp) != 1) return false;
93  if (!bounding_box_.DeSerialize(swap, fp)) return false;
94  if (fread(&num_features_, sizeof(num_features_), 1, fp) != 1) return false;
95  if (fread(&num_micro_features_, sizeof(num_micro_features_), 1, fp) != 1)
96  return false;
97  if (fread(&outline_length_, sizeof(outline_length_), 1, fp) != 1)
98  return false;
// Counts must be byte-swapped before they are used as array sizes below.
// NOTE(review): font_id_ and page_num_ are read above but never swapped here
// -- presumably tolerated by callers; confirm against the readers of these
// fields.
99  if (swap) {
100  ReverseN(&class_id_, sizeof(class_id_));
101  ReverseN(&num_features_, sizeof(num_features_));
102  ReverseN(&num_micro_features_, sizeof(num_micro_features_));
103  ReverseN(&outline_length_, sizeof(outline_length_));
104  }
// Replace any previously-held arrays with freshly-sized ones.
// NOTE(review): a corrupt file could yield an absurd count here and this code
// allocates without a sanity check -- verify callers only feed trusted files.
105  delete [] features_;
106  features_ = new INT_FEATURE_STRUCT[num_features_];
107  if (static_cast<int>(fread(features_, sizeof(*features_), num_features_, fp))
108  != num_features_)
109  return false;
110  delete [] micro_features_;
111  micro_features_ = new MicroFeature[num_micro_features_];
112  if (static_cast<int>(fread(micro_features_, sizeof(*micro_features_),
113  num_micro_features_,
114  fp)) != num_micro_features_)
115  return false;
116  if (fread(cn_feature_, sizeof(*cn_feature_), kNumCNParams, fp) !=
117  kNumCNParams) return false;
118  if (fread(geo_feature_, sizeof(*geo_feature_), GeoCount, fp) != GeoCount)
119  return false;
120  return true;
121 }
122 
123 // Saves the given features into a TrainingSample.
// NOTE(review): several lines of CopyFromFeatures() are missing from this
// extract -- the signature line, the "features" parameter line, the
// allocation of "sample", and the right-hand sides of the CharNormY and
// CharNormLength assignments (original lines 124, 127, 129, 140, 142). Only
// the surviving lines are annotated below.
125  const INT_FX_RESULT_STRUCT& fx_info,
126  const TBOX& bounding_box,
128  int num_features) {
130  sample->num_features_ = num_features;
131  sample->features_ = new INT_FEATURE_STRUCT[num_features];
132  sample->outline_length_ = fx_info.Length;
133  memcpy(sample->features_, features, num_features * sizeof(features[0]));
// The geometric feature is taken straight from the bounding box.
134  sample->geo_feature_[GeoBottom] = bounding_box.bottom();
135  sample->geo_feature_[GeoTop] = bounding_box.top();
136  sample->geo_feature_[GeoWidth] = bounding_box.width();
137 
138  // Generate the cn_feature_ from the fx_info.
139  sample->cn_feature_[CharNormY] =
141  sample->cn_feature_[CharNormLength] =
143  sample->cn_feature_[CharNormRx] = MF_SCALE_FACTOR * fx_info.Rx;
144  sample->cn_feature_[CharNormRy] = MF_SCALE_FACTOR * fx_info.Ry;
145 
// A brand-new sample has no valid indexed/mapped feature caches.
146  sample->features_are_indexed_ = false;
147  sample->features_are_mapped_ = false;
148  return sample;
149 }
150 
151 // Returns the cn_feature as a FEATURE_STRUCT* needed by cntraining.
// NOTE(review): the signature line of GetCNFeature() (original line 152) is
// missing from this extract.
// Copies the kNumCNParams cached character-normalization params into a
// freshly allocated FEATURE; the caller owns the returned feature.
153  FEATURE feature = NewFeature(&CharNormDesc);
154  for (int i = 0; i < kNumCNParams; ++i)
155  feature->Params[i] = cn_feature_[i];
156  return feature;
157 }
158 
159 // Constructs and returns a copy randomized by the method given by
160 // the randomizer index. If index is out of [0, kSampleRandomSize) then
161 // an exact copy is returned.
// NOTE(review): the first two lines of RandomizedCopy() (original lines
// 162-163: the signature and the "sample = Copy()" call) are missing from
// this extract.
164  if (index >= 0 && index < kSampleRandomSize) {
165  ++index; // Remove the first combination.
// Decompose index into a (y-shift, scale) pair from the two factor tables.
166  int yshift = kYShiftValues[index / kSampleScaleSize];
167  double scaling = kScaleValues[index % kSampleScaleSize];
168  for (int i = 0; i < num_features_; ++i) {
// Scale X about kRandomizingCenter, round to nearest, and clip to a uinT8.
169  double result = (features_[i].X - kRandomizingCenter) * scaling;
170  result += kRandomizingCenter;
171  sample->features_[i].X = ClipToRange(static_cast<int>(result + 0.5), 0,
172  MAX_UINT8);
// Y gets the same scaling about the center plus the table-driven shift.
173  result = (features_[i].Y - kRandomizingCenter) * scaling;
174  result += kRandomizingCenter + yshift;
175  sample->features_[i].Y = ClipToRange(static_cast<int>(result + 0.5), 0,
176  MAX_UINT8);
177  }
178  }
179  return sample;
180 }
181 
182 // Constructs and returns an exact copy.
// NOTE(review): the signature of Copy() and the "new TrainingSample"
// allocation (original lines 183-184) are missing from this extract.
// Performs a deep copy: the features_ and micro_features_ arrays are
// duplicated, not shared, so the returned sample owns its own storage.
185  sample->class_id_ = class_id_;
186  sample->font_id_ = font_id_;
187  sample->weight_ = weight_;
188  sample->sample_index_ = sample_index_;
189  sample->num_features_ = num_features_;
190  if (num_features_ > 0) {
191  sample->features_ = new INT_FEATURE_STRUCT[num_features_];
192  memcpy(sample->features_, features_, num_features_ * sizeof(features_[0]));
193  }
194  sample->num_micro_features_ = num_micro_features_;
195  if (num_micro_features_ > 0) {
196  sample->micro_features_ = new MicroFeature[num_micro_features_];
197  memcpy(sample->micro_features_, micro_features_,
198  num_micro_features_ * sizeof(micro_features_[0]));
199  }
// The fixed-size CN and Geo parameter arrays are copied wholesale.
200  memcpy(sample->cn_feature_, cn_feature_, sizeof(*cn_feature_) * kNumCNParams);
201  memcpy(sample->geo_feature_, geo_feature_, sizeof(*geo_feature_) * GeoCount);
202  return sample;
203 }
204 
205 // Extracts the needed information from the CHAR_DESC_STRUCT.
// Pulls four feature sets (INT, micro, CN, Geo) out of char_desc, selected by
// the four *_type indices, and replaces this sample's cached copies. Missing
// INT/micro sets reset the corresponding arrays to empty; missing CN/Geo sets
// only print an error and leave the previous values untouched.
206 void TrainingSample::ExtractCharDesc(int int_feature_type,
207  int micro_type,
208  int cn_type,
209  int geo_type,
210  CHAR_DESC_STRUCT* char_desc) {
211  // Extract the INT features.
212  delete[] features_;
213  FEATURE_SET_STRUCT* char_features = char_desc->FeatureSets[int_feature_type];
214  if (char_features == NULL) {
// NOTE(review): the second tprintf argument (original line 216, presumably
// kIntFeatureType) is missing from this extract.
215  tprintf("Error: no features to train on of type %s\n",
217  num_features_ = 0;
218  features_ = NULL;
219  } else {
220  num_features_ = char_features->NumFeatures;
221  features_ = new INT_FEATURE_STRUCT[num_features_];
222  for (int f = 0; f < num_features_; ++f) {
// Quantize each float param down to the uinT8 fields of INT_FEATURE_STRUCT.
223  features_[f].X =
224  static_cast<uinT8>(char_features->Features[f]->Params[IntX]);
225  features_[f].Y =
226  static_cast<uinT8>(char_features->Features[f]->Params[IntY]);
227  features_[f].Theta =
228  static_cast<uinT8>(char_features->Features[f]->Params[IntDir]);
229  features_[f].CP_misses = 0;
230  }
231  }
232  // Extract the Micro features.
233  delete[] micro_features_;
234  char_features = char_desc->FeatureSets[micro_type];
235  if (char_features == NULL) {
// NOTE(review): the second tprintf argument (original line 237, presumably
// kMicroFeatureType) is missing from this extract.
236  tprintf("Error: no features to train on of type %s\n",
238  num_micro_features_ = 0;
239  micro_features_ = NULL;
240  } else {
241  num_micro_features_ = char_features->NumFeatures;
242  micro_features_ = new MicroFeature[num_micro_features_];
243  for (int f = 0; f < num_micro_features_; ++f) {
244  for (int d = 0; d < MFCount; ++d) {
245  micro_features_[f][d] = char_features->Features[f]->Params[d];
246  }
247  }
248  }
249  // Extract the CN feature.
250  char_features = char_desc->FeatureSets[cn_type];
251  if (char_features == NULL) {
252  tprintf("Error: no CN feature to train on.\n");
253  } else {
// CN and Geo are single-feature sets; assert that invariant loudly.
254  ASSERT_HOST(char_features->NumFeatures == 1);
255  cn_feature_[CharNormY] = char_features->Features[0]->Params[CharNormY];
256  cn_feature_[CharNormLength] =
257  char_features->Features[0]->Params[CharNormLength];
258  cn_feature_[CharNormRx] = char_features->Features[0]->Params[CharNormRx];
259  cn_feature_[CharNormRy] = char_features->Features[0]->Params[CharNormRy];
260  }
261  // Extract the Geo feature.
262  char_features = char_desc->FeatureSets[geo_type];
263  if (char_features == NULL) {
264  tprintf("Error: no Geo feature to train on.\n");
265  } else {
266  ASSERT_HOST(char_features->NumFeatures == 1);
267  geo_feature_[GeoBottom] = char_features->Features[0]->Params[GeoBottom];
268  geo_feature_[GeoTop] = char_features->Features[0]->Params[GeoTop];
269  geo_feature_[GeoWidth] = char_features->Features[0]->Params[GeoWidth];
270  }
// Any cached indexed/mapped features are now stale.
271  features_are_indexed_ = false;
272  features_are_mapped_ = false;
273 }
274 
275 // Sets the mapped_features_ from the features_ using the provided
276 // feature_space to the indexed versions of the features.
277 void TrainingSample::IndexFeatures(const IntFeatureSpace& feature_space) {
// NOTE(review): original line 278 is missing from this extract -- presumably
// a local declaration; the visible call writes directly into mapped_features_.
279  feature_space.IndexAndSortFeatures(features_, num_features_,
280  &mapped_features_);
// Record which kind of cache mapped_features_ currently holds.
281  features_are_indexed_ = true;
282  features_are_mapped_ = false;
283 }
284 
285 // Sets the mapped_features_ from the features using the provided
286 // feature_map.
287 void TrainingSample::MapFeatures(const IntFeatureMap& feature_map) {
// NOTE(review): original line 288 is missing from this extract -- presumably
// the declaration of the local "indexed_features" vector used below.
289  feature_map.feature_space().IndexAndSortFeatures(features_, num_features_,
290  &indexed_features);
// Two-step mapping: index+sort in feature space, then translate the indices
// through the feature map into mapped_features_.
291  feature_map.MapIndexedFeatures(indexed_features, &mapped_features_);
292  features_are_indexed_ = false;
293  features_are_mapped_ = true;
294 }
295 
296 // Returns a pix representing the sample. (Int features only.)
297 Pix* TrainingSample::RenderToPix(const UNICHARSET* unicharset) const {
298  Pix* pix = pixCreate(kIntFeatureExtent, kIntFeatureExtent, 1);
299  for (int f = 0; f < num_features_; ++f) {
300  int start_x = features_[f].X;
301  int start_y = kIntFeatureExtent - features_[f].Y;
302  double dx = cos((features_[f].Theta / 256.0) * 2.0 * PI - PI);
303  double dy = -sin((features_[f].Theta / 256.0) * 2.0 * PI - PI);
304  for (int i = 0; i <= 5; ++i) {
305  int x = static_cast<int>(start_x + dx * i);
306  int y = static_cast<int>(start_y + dy * i);
307  if (x >= 0 && x < 256 && y >= 0 && y < 256)
308  pixSetPixel(pix, x, y, 1);
309  }
310  }
311  if (unicharset != NULL)
312  pixSetText(pix, unicharset->id_to_unichar(class_id_));
313  return pix;
314 }
315 
316 // Displays the features in the given window with the given color.
// NOTE(review): the first line of DisplayFeatures() (original line 317,
// carrying the "ScrollView::Color color," parameter) is missing from this
// extract.
318  ScrollView* window) const {
// Compiles to a no-op when tesseract is built without graphics support.
319  #ifndef GRAPHICS_DISABLED
320  for (int f = 0; f < num_features_; ++f) {
321  RenderIntFeature(window, &features_[f], color);
322  }
323  #endif // GRAPHICS_DISABLED
324 }
325 
326 // Returns a pix of the original sample image. The pix is padded all round
327 // by padding wherever possible.
328 // The returned Pix must be pixDestroyed after use.
329 // If the input page_pix is NULL, NULL is returned.
330 Pix* TrainingSample::GetSamplePix(int padding, Pix* page_pix) const {
331  if (page_pix == NULL)
332  return NULL;
333  int page_width = pixGetWidth(page_pix);
334  int page_height = pixGetHeight(page_pix);
335  TBOX padded_box = bounding_box();
336  padded_box.pad(padding, padding);
337  // Clip the padded_box to the limits of the page
338  TBOX page_box(0, 0, page_width, page_height);
339  padded_box &= page_box;
340  Box* box = boxCreate(page_box.left(), page_height - page_box.top(),
341  page_box.width(), page_box.height());
342  Pix* sample_pix = pixClipRectangle(page_pix, box, NULL);
343  boxDestroy(&box);
344  return sample_pix;
345 }
346 
347 } // namespace tesseract
bool DeSerialize(bool swap, FILE *fp)
Definition: rect.cpp:192
FEATURE Features[1]
Definition: ocrfeatures.h:72
void MapFeatures(const IntFeatureMap &feature_map)
FEATURE_STRUCT * GetCNFeature() const
const FEATURE_DESC_STRUCT CharNormDesc
Definition: picofeat.h:30
#define MAX_UINT8
Definition: host.h:63
#define LENGTH_COMPRESSION
Definition: normfeat.h:26
const char * kMicroFeatureType
Definition: featdefs.cpp:41
bool Serialize(FILE *fp) const
Definition: rect.cpp:185
TrainingSample * RandomizedCopy(int index) const
bool DeSerialize(bool swap, FILE *fp)
#define tprintf(...)
Definition: tprintf.h:31
static TrainingSample * DeSerializeCreate(bool swap, FILE *fp)
const int kBlnBaselineOffset
Definition: normalis.h:29
void IndexFeatures(const IntFeatureSpace &feature_space)
#define MF_SCALE_FACTOR
Definition: mfoutline.h:63
FEATURE_SET FeatureSets[NUM_FEATURE_TYPES]
Definition: featdefs.h:44
#define ASSERT_HOST(x)
Definition: errcode.h:84
const char * id_to_unichar(UNICHAR_ID id) const
Definition: unicharset.cpp:266
inT16 left() const
Definition: rect.h:68
float MicroFeature[MFCount]
Definition: mf.h:33
#define PI
Definition: const.h:19
void RenderIntFeature(ScrollView *window, const INT_FEATURE_STRUCT *Feature, ScrollView::Color color)
Definition: intproto.cpp:1693
void IndexAndSortFeatures(const INT_FEATURE_STRUCT *features, int num_features, GenericVector< int > *sorted_features) const
T ClipToRange(const T &x, const T &lower_bound, const T &upper_bound)
Definition: helpers.h:122
Pix * GetSamplePix(int padding, Pix *page_pix) const
FEATURE NewFeature(const FEATURE_DESC_STRUCT *FeatureDesc)
Definition: ocrfeatures.cpp:88
void pad(int xpad, int ypad)
Definition: rect.h:127
Definition: picofeat.h:29
Definition: mf.h:30
int MapIndexedFeatures(const GenericVector< int > &index_features, GenericVector< int > *map_features) const
inT16 top() const
Definition: rect.h:54
const int kIntFeatureExtent
Definition: rect.h:30
void ExtractCharDesc(int feature_type, int micro_type, int cn_type, int geo_type, CHAR_DESC_STRUCT *char_desc)
inT16 height() const
Definition: rect.h:104
uint8_t uinT8
Definition: host.h:35
const char * kIntFeatureType
Definition: featdefs.cpp:43
TrainingSample * Copy() const
const INT_FEATURE_STRUCT * features() const
inT16 width() const
Definition: rect.h:111
Pix * RenderToPix(const UNICHARSET *unicharset) const
inT16 bottom() const
Definition: rect.h:61
const int kRandomizingCenter
bool Serialize(FILE *fp) const
FLOAT32 Params[1]
Definition: ocrfeatures.h:65
void DisplayFeatures(ScrollView::Color color, ScrollView *window) const
Definition: cluster.h:32
const GenericVector< int > & indexed_features() const
ELISTIZE(AmbigSpec)
const TBOX & bounding_box() const
const IntFeatureSpace & feature_space() const
Definition: intfeaturemap.h:60
static TrainingSample * CopyFromFeatures(const INT_FX_RESULT_STRUCT &fx_info, const TBOX &bounding_box, const INT_FEATURE_STRUCT *features, int num_features)
void ReverseN(void *ptr, int num_bytes)
Definition: helpers.h:184