#include "config_auto.h"
// ...

#define ADAPT_TEMPLATE_SUFFIX ".a"
#define MAX_MATCHES 10
#define UNLIKELY_NUM_FEAT 200
#define MAX_ADAPTABLE_WERD_SIZE 40
#define ADAPTABLE_WERD_ADJUSTMENT (0.05)
#define Y_DIM_OFFSET (Y_SHIFT - BASELINE_Y_SHIFT)
#define WORST_POSSIBLE_RATING (0.0f)

// ...
  HasNonfragment = false;
  best_unichar_id = INVALID_UNICHAR_ID;
  best_match_index = -1;
  // ...
  for (int i = 0; i < match.size(); ++i) {
    if (match[i].rating > best_rating) {
      best_rating = match[i].rating;
      best_unichar_id = match[i].unichar_id;
      best_match_index = i;
    }
  }

inline bool MarginalMatch(float confidence, float matcher_great_threshold) {
  return (1.0f - confidence) > matcher_great_threshold;
}
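// ---------------------------------------------------------------------------
// Hedged illustration (not part of adaptmatch.cpp): how the MarginalMatch
// predicate above behaves.  Confidences/ratings here are in [0, 1] with higher
// meaning better; a match is "marginal" when its shortfall from a perfect 1.0
// exceeds the threshold.  The threshold value 0.125 is an invented example,
// not an asserted library default.
#include <cstdio>

static bool MarginalMatchSketch(float confidence, float great_threshold) {
  return (1.0f - confidence) > great_threshold;
}

int main() {
  printf("%d\n", MarginalMatchSketch(0.95f, 0.125f));  // 0: confident match
  printf("%d\n", MarginalMatchSketch(0.80f, 0.125f));  // 1: marginal match
  return 0;
}
// ---------------------------------------------------------------------------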
// Body of FindScoredUnichar(id, results):
  for (int i = 0; i < results.match.size(); i++) {
    if (results.match[i].unichar_id == id)
      return i;
  }
  // ...

// Body of ScoredUnichar(id, results):
  int index = FindScoredUnichar(id, results);
  // ...
  return results.match[index].rating;

void Classify::AdaptiveClassifier(TBLOB *Blob, BLOB_CHOICE_LIST *Choices) {
  assert(Choices != NULL);
  // ...
  DoAdaptiveMatch(Blob, Results);
  // ...
  RemoveBadMatches(Results);
  Results->match.sort(&UnicharRating::SortDescendingRating);
  RemoveExtraPuncs(Results);
  // ...
  if (LargeSpeckle(*Blob) || Choices->length() == 0)
    AddLargeSpeckleTo(Results->BlobLength, Choices);
  // ...
  if (matcher_debug_level >= 1) {
    // ...
    PrintAdaptiveMatchResults(*Results);
  }

#ifndef GRAPHICS_DISABLED
  if (classify_enable_adaptive_debugger)
    DebugAdaptiveClassifier(Blob, Results);
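// Hedged usage sketch (not from the original file): one way a caller might
// drive AdaptiveClassifier for a single segmented blob and walk the returned
// choices.  Obtaining a tesseract::Classify instance and a normalized TBLOB,
// and the Tesseract headers this needs, are assumed and not shown here.
static void ClassifyOneBlobSketch(tesseract::Classify* classify, TBLOB* blob) {
  BLOB_CHOICE_LIST choices;                     // filled with ranked choices
  classify->AdaptiveClassifier(blob, &choices);
  BLOB_CHOICE_IT it(&choices);
  for (it.mark_cycle_pt(); !it.cycled_list(); it.forward()) {
    // Each BLOB_CHOICE carries a unichar id plus rating/certainty scores.
    tprintf("id=%d rating=%g certainty=%g\n",
            it.data()->unichar_id(),
            it.data()->rating(),
            it.data()->certainty());
  }
}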
void Classify::RefreshDebugWindow(ScrollView **win, const char *msg,
                                  int y_offset, const TBOX &wbox) {
#ifndef GRAPHICS_DISABLED
  const int kSampleSpaceWidth = 500;
  // ...
  *win = new ScrollView(msg, 100, y_offset, kSampleSpaceWidth * 2, 200,
                        kSampleSpaceWidth * 2, 200, true);
  // ...
  (*win)->Pen(64, 64, 64);
  // ...
  (*win)->ZoomToRectangle(wbox.left(), wbox.top(), /* ... */);
#endif  // GRAPHICS_DISABLED
}

void Classify::LearnWord(const char* fontname, WERD_RES* word) {
  // ...
  if (word_len == 0) return;

  float* thresholds = NULL;
  if (fontname == NULL) {
    // ...
    if (classify_learning_debug_level >= 1)
      tprintf("\n\nAdapting to word = %s\n", /* ... */);
    thresholds = new float[word_len];
    // ...(matcher_perfect_threshold, matcher_good_threshold,
    //     matcher_rating_margin, thresholds);
  }
  // ...

#ifndef GRAPHICS_DISABLED
  if (classify_debug_character_fragments) {
    if (learn_fragmented_word_debug_win_ != NULL) {
      // ...
    }
    RefreshDebugWindow(&learn_fragments_debug_win_, "LearnPieces", 400,
                       /* ... */);
    RefreshDebugWindow(&learn_fragmented_word_debug_win_, "LearnWord", 200,
                       /* ... */);
    // ...
  }
#endif  // GRAPHICS_DISABLED

  for (int ch = 0; ch < word_len; ++ch) {
    if (classify_debug_character_fragments) {
      // ...
    }
    float threshold = thresholds != NULL ? thresholds[ch] : 0.0f;
    // ...
    LearnPieces(fontname, start_blob, word->best_state[ch], threshold,
                /* ... */);
    // ...
    bool garbage = false;
    // ...
    for (frag = 0; frag < word->best_state[ch]; ++frag) {
      // ...
      if (classify_character_fragments_garbage_certainty_threshold < 0) {
        garbage |= LooksLikeGarbage(frag_blob);
      }
      // ...
    }
    // ...
    if (pieces_all_natural || !prioritize_division) {
      for (frag = 0; frag < word->best_state[ch]; ++frag) {
        // ...(tokens[0].string(), frag, word->best_state[ch], ...)
        for (int i = 0; i < tokens.size(); i++) {
          full_string += tokens[i];
          if (i != tokens.size() - 1)
            // ...
        }
        // ...
        LearnPieces(fontname, start_blob + frag, 1, threshold,
                    /* ... */);
      }
    }
    // ...
  }
  delete [] thresholds;
void Classify::LearnPieces(const char* fontname, int start, int length,
                           float threshold, CharSegmentationType segmentation,
                           const char* correct_text, WERD_RES* word) {
  // ...
  if (rotated_blob == NULL)
    // ...

#ifndef GRAPHICS_DISABLED
  // ...
  if (strcmp(classify_learn_debug_str.string(), correct_text) == 0) {
    RefreshDebugWindow(&learn_debug_win_, "LearnPieces", 600,
                       /* ... */);
    // ...
    learn_debug_win_->Update();
    // ...
  }
  if (classify_debug_character_fragments && segmentation == CST_FRAGMENT) {
    // ...
    blob->plot(learn_fragments_debug_win_, /* ... */);
    // ...
    learn_fragments_debug_win_->Update();
  }
#endif  // GRAPHICS_DISABLED

  if (fontname != NULL) {
    classify_norm_method.set_value(character);
    tess_bn_matching.set_value(false);
    tess_cn_matching.set_value(false);
    DENORM bl_denorm, cn_denorm;
    // ...
    SetupBLCNDenorms(*rotated_blob, classify_nonlinear_norm,
                     &bl_denorm, &cn_denorm, &fx_info);
    LearnBlob(fontname, rotated_blob, cn_denorm, fx_info, correct_text);
  } else if (unicharset.contains_unichar(correct_text)) {
    UNICHAR_ID class_id = unicharset.unichar_to_id(correct_text);
    int font_id = word->fontinfo != NULL
                      ? fontinfo_table_.get_id(*word->fontinfo)
                      : /* ... */;
    if (classify_learning_debug_level >= 1)
      tprintf("Adapting to char = %s, thr= %g font_id= %d\n",
              unicharset.id_to_unichar(class_id), threshold, font_id);
    // ...
    AdaptToChar(rotated_blob, class_id, font_id, threshold, AdaptedTemplates);
    if (BackupAdaptedTemplates != NULL) {
      // ...
      AdaptToChar(rotated_blob, class_id, font_id, threshold,
                  BackupAdaptedTemplates);
    }
  } else if (classify_debug_level >= 1) {
    tprintf("Can't adapt to %s not in unicharset\n", correct_text);
  }
  // ...
  if (rotated_blob != blob) {
void Classify::EndAdaptiveClassifier() {
  // ...
  if (AdaptedTemplates != NULL &&
      classify_enable_adaptive_matcher && classify_save_adapted_templates) {
    // ...
    File = fopen(Filename.string(), "wb");
    // ...
      cprintf("Unable to save adapted templates to %s!\n", Filename.string());
    // ...
      cprintf("\nSaving adapted templates to %s ...", Filename.string());
      // ...
      WriteAdaptedTemplates(File, AdaptedTemplates);
    // ...
  }

  if (AdaptedTemplates != NULL) {
    // ...
    AdaptedTemplates = NULL;
  }
  if (BackupAdaptedTemplates != NULL) {
    // ...
    BackupAdaptedTemplates = NULL;
  }
  if (PreTrainedTemplates != NULL) {
    // ...
    PreTrainedTemplates = NULL;
  }
  getDict().EndDangerousAmbigs();
  // ...
  if (AllProtosOn != NULL) {
    // ...
    AllConfigsOff = NULL;
    TempProtoMask = NULL;
  }
  // ...
  if (static_classifier_ != NULL) {
    delete static_classifier_;
    static_classifier_ = NULL;
  }
  // (body of Classify::InitAdaptiveClassifier)
  if (!classify_enable_adaptive_matcher)
    // ...
  if (AllProtosOn != NULL)
    EndAdaptiveClassifier();
  // ...
  if (language_data_path_prefix.length() > 0 && mgr != nullptr) {
    // ...
    PreTrainedTemplates = ReadIntTemplates(&fp);
    // ...
    if (!shape_table_->DeSerialize(&fp)) {
      tprintf("Error loading shape table!\n");
      // ...
    }
    // ...
    ReadNewCutoffs(&fp, CharNormCutoffs);
    // ...
  }

  im_.Init(&classify_debug_level);
  // ...
    BaselineCutoffs[i] = 0;
  // ...

  if (classify_use_pre_adapted_templates) {
    // ...
    Filename = imagefile;
    // ...
      AdaptedTemplates = NewAdaptedTemplates(true);
    // ...
      cprintf("\nReading pre-adapted templates from %s ...\n",
              /* ... */);
      // ...
      AdaptedTemplates = ReadAdaptedTemplates(&fp);
      // ...
      PrintAdaptedTemplates(stdout, AdaptedTemplates);
      // ...
      for (int i = 0; i < AdaptedTemplates->Templates->NumClasses; i++) {
        BaselineCutoffs[i] = CharNormCutoffs[i];
        // ...
      }
    // ...
    if (AdaptedTemplates != NULL)
      // ...
    AdaptedTemplates = NewAdaptedTemplates(true);
void Classify::ResetAdaptiveClassifierInternal() {
  if (classify_learning_debug_level > 0) {
    tprintf("Resetting adaptive classifier (NumAdaptationsFailed=%d)\n",
            NumAdaptationsFailed);
  }
  // ...
  AdaptedTemplates = NewAdaptedTemplates(true);
  if (BackupAdaptedTemplates != NULL)
    // ...
  BackupAdaptedTemplates = NULL;
  NumAdaptationsFailed = 0;
}

void Classify::SwitchAdaptiveClassifier() {
  if (BackupAdaptedTemplates == NULL) {
    ResetAdaptiveClassifierInternal();
    // ...
  }
  if (classify_learning_debug_level > 0) {
    tprintf("Switch to backup adaptive classifier (NumAdaptationsFailed=%d)\n",
            NumAdaptationsFailed);
  }
  // ...
  AdaptedTemplates = BackupAdaptedTemplates;
  BackupAdaptedTemplates = NULL;
  NumAdaptationsFailed = 0;
}

void Classify::StartBackupAdaptiveClassifier() {
  if (BackupAdaptedTemplates != NULL)
    // ...
  BackupAdaptedTemplates = NewAdaptedTemplates(true);
}
void Classify::SettupPass1() {
  EnableLearning = classify_enable_learning;
  // ...
  getDict().SettupStopperPass1();
}

void Classify::SettupPass2() {
  EnableLearning = FALSE;
  getDict().SettupStopperPass2();
}
void Classify::InitAdaptedClass(TBLOB *Blob,
                                /* ... */) {
  // ...
  classify_norm_method.set_value(baseline);
  Features = ExtractOutlineFeatures(Blob);
  // ...
  if (Templates == AdaptedTemplates)
    BaselineCutoffs[ClassId] = CharNormCutoffs[ClassId];
  // ...
  for (Fid = 0; Fid < Features->NumFeatures; Fid++) {
    // ...
    Proto = &(TempProto->Proto);
    // ...
    ConvertProto(Proto, Pid, IClass);
    // ...(call elided; last argument: classify_learning_debug_level >= 2)
  }
  // ...
  if (classify_learning_debug_level >= 1) {
    tprintf("Added new class '%s' with class id %d and %d protos.\n",
            unicharset.id_to_unichar(ClassId), ClassId, NumFeatures);
    if (classify_learning_debug_level > 1)
      DisplayAdaptedChar(Blob, IClass);
  }
int Classify::GetAdaptiveFeatures(TBLOB *Blob,
                                  /* ... */) {
  // ...
  classify_norm_method.set_value(baseline);
  Features = ExtractPicoFeatures(Blob);
  // ...
  ComputeIntFeatures(Features, IntFeatures);
  *FloatFeatures = Features;
  // (word adaptability check, presumably Classify::AdaptableWord)
  float adaptable_score = /* ... */;
  // ...
      BestChoiceLength > 0 &&
  // ...
  // (from Classify::AdaptToChar)
  Class = adaptive_templates->Class[ClassId];
  assert(Class != NULL);
  // ...
  InitAdaptedClass(Blob, ClassId, FontinfoId, Class, adaptive_templates);
  // ...
  NumFeatures = GetAdaptiveFeatures(Blob, IntFeatures, &FloatFeatures);
  if (NumFeatures <= 0) {
    // ...
  }
  // ...
  for (int cfg = 0; cfg < IClass->NumConfigs; ++cfg) {
    if (GetFontinfoId(Class, cfg) == FontinfoId) {
      SET_BIT(MatchingFontConfigs, cfg);
      // ...
    }
  }
  // ...
  im_.Match(IClass, AllProtosOn, MatchingFontConfigs,
            NumFeatures, IntFeatures,
            &int_result, classify_adapt_feature_threshold,
            NO_DEBUG, matcher_debug_separate_windows);
  // ...
  if (1.0f - int_result.rating <= Threshold) {
    // ...
    if (classify_learning_debug_level >= 1)
      tprintf("Found good match to perm config %d = %4.1f%%.\n",
              /* ... */);
    // ...
    if (classify_learning_debug_level >= 1)
      tprintf("Increasing reliability of temp config %d to %d.\n",
              /* ... */);
    // ...
    if (TempConfigReliable(ClassId, TempConfig)) {
      MakePermanent(adaptive_templates, ClassId, int_result.config, Blob);
      UpdateAmbigsGroup(ClassId, Blob);
    }
  // ...
    if (classify_learning_debug_level >= 1) {
      tprintf("Found poor match to temp config %d = %4.1f%%.\n",
              /* ... */);
      if (classify_learning_debug_level > 2)
        DisplayAdaptedChar(Blob, IClass);
    }
    // ...
    int NewTempConfigId =
        MakeNewTemporaryConfig(adaptive_templates, ClassId, FontinfoId,
                               NumFeatures, IntFeatures, FloatFeatures);
    if (NewTempConfigId >= 0 &&
        TempConfigReliable(ClassId, TempConfigFor(Class, NewTempConfigId))) {
      MakePermanent(adaptive_templates, ClassId, NewTempConfigId, Blob);
      UpdateAmbigsGroup(ClassId, Blob);
    }
#ifndef GRAPHICS_DISABLED
    if (classify_learning_debug_level > 1) {
      DisplayAdaptedChar(Blob, IClass);
#ifndef GRAPHICS_DISABLED
  // (from Classify::DisplayAdaptedChar)
  // ...
  if (sample == NULL) return;
  // ...
  im_.Match(int_class, AllProtosOn, AllConfigsOn,
            bl_features.size(), &bl_features[0],
            &int_result, classify_adapt_feature_threshold,
            NO_DEBUG, matcher_debug_separate_windows);
  tprintf("Best match to temp config %d = %4.1f%%.\n",
          int_result.config, int_result.rating * 100.0);
  if (classify_learning_debug_level >= 2) {
    // ...
    ConfigMask = 1 << int_result.config;
    // ...
    im_.Match(int_class, AllProtosOn, (BIT_VECTOR)&ConfigMask,
              bl_features.size(), &bl_features[0],
              &int_result, classify_adapt_feature_threshold,
              6 | 0x19, matcher_debug_separate_windows);
  // (from Classify::AddNewResult)
  int old_match = FindScoredUnichar(new_result.unichar_id, *results);
  // ...
      new_result.rating <= results->match[old_match].rating))
    // ...
  if (!unicharset.get_fragment(new_result.unichar_id))
    // ...
  if (old_match < results->match.size()) {
    results->match[old_match].rating = new_result.rating;
    // ...
  }
  // ...
      !unicharset.get_fragment(new_result.unichar_id)) {
void Classify::AmbigClassifier(
    /* ... */) {
  // ...
  if (int_features.empty()) return;
  uinT8* CharNormArray = new uinT8[unicharset.size()];
  // ...
  results->BlobLength = GetCharNormFeature(fx_info, templates, NULL,
                                           /* ... */);
  bool debug = matcher_debug_level >= 2 || classify_debug_level > 1;
  // ...
  while (*ambiguities >= 0) {
    // ...
    im_.Match(/* ... */,
              AllProtosOn, AllConfigsOn,
              int_features.size(), &int_features[0],
              /* ... */,
              classify_adapt_feature_threshold, NO_DEBUG,
              matcher_debug_separate_windows);
    // ...
    ExpandShapesAndApplyCorrections(NULL, debug, class_id, bottom, top, 0,
                                    /* ... */,
                                    CharNormArray, &int_result, results);
    // ...
  }
  delete [] CharNormArray;
void Classify::MasterMatcher(/* ... */,
                             const uinT8* norm_factors,
                             /* ... */,
                             int matcher_multiplier,
                             const TBOX& blob_box,
                             /* ... */) {
  int top = blob_box.top();
  int bottom = blob_box.bottom();
  // ...
  for (int c = 0; c < results.size(); c++) {
    CLASS_ID class_id = results[c].Class;
    // ...
    im_.Match(/* ... */,
              num_features, features,
              &int_result, classify_adapt_feature_threshold, debug,
              matcher_debug_separate_windows);
    bool debug = matcher_debug_level >= 2 || classify_debug_level > 1;
    ExpandShapesAndApplyCorrections(classes, debug, class_id, bottom, top,
                                    /* ... */,
                                    matcher_multiplier, norm_factors,
                                    &int_result, final_results);
void Classify::ExpandShapesAndApplyCorrections(
    ADAPT_CLASS* classes, bool debug, int class_id, int bottom, int top,
    float cp_rating, int blob_length, int matcher_multiplier,
    const uinT8* cn_factors,
    /* ... */) {
  if (classes != NULL) {
    // ...
    for (int f = 0; f < int_result->fonts.size(); ++f) {
      int_result->fonts[f].fontinfo_id =
          GetFontinfoId(classes[class_id], int_result->fonts[f].fontinfo_id);
    }
  } else {
    // ...
    for (int f = 0; f < int_result->fonts.size(); ++f) {
      int_result->fonts[f].fontinfo_id =
          ClassAndConfigIDToFontOrShapeID(class_id,
                                          int_result->fonts[f].fontinfo_id);
    }
  }
  // ...
  if (shape_table_ != NULL) {
    // ...
    for (int f = 0; f < int_result->fonts.size(); ++f) {
      int shape_id = int_result->fonts[f].fontinfo_id;
      const Shape& shape = shape_table_->GetShape(shape_id);
      for (int c = 0; c < shape.size(); ++c) {
        int unichar_id = shape[c].unichar_id;
        if (!unicharset.get_enabled(unichar_id)) continue;
        // ...
        for (r = 0; r < mapped_results.size() &&
             mapped_results[r].unichar_id != unichar_id; ++r) {}
        if (r == mapped_results.size()) {
          // ...
          mapped_results[r].unichar_id = unichar_id;
          mapped_results[r].fonts.truncate(0);
        }
        for (int i = 0; i < shape[c].font_ids.size(); ++i) {
          // ...
        }
      }
    }
    for (int m = 0; m < mapped_results.size(); ++m) {
      mapped_results[m].rating =
          ComputeCorrectedRating(debug, mapped_results[m].unichar_id,
                                 cp_rating, int_result->rating,
                                 /* ... */,
                                 blob_length, matcher_multiplier, cn_factors);
      AddNewResult(mapped_results[m], final_results);
    }
    // ...
  }
  if (unicharset.get_enabled(class_id)) {
    int_result->rating = ComputeCorrectedRating(debug, class_id, cp_rating,
                                                /* ... */,
                                                bottom, top, blob_length,
                                                matcher_multiplier, cn_factors);
    AddNewResult(*int_result, final_results);
  }
double Classify::ComputeCorrectedRating(bool debug, int unichar_id,
                                        double cp_rating, double im_rating,
                                        /* ... */,
                                        int bottom, int top,
                                        int blob_length, int matcher_multiplier,
                                        const uinT8* cn_factors) {
  // ...
  double cn_corrected = im_.ApplyCNCorrection(1.0 - im_rating, blob_length,
                                              cn_factors[unichar_id],
                                              matcher_multiplier);
  double miss_penalty = tessedit_class_miss_scale * feature_misses;
  double vertical_penalty = 0.0;
  // ...
  if (!unicharset.get_isalpha(unichar_id) &&
      !unicharset.get_isdigit(unichar_id) &&
      cn_factors[unichar_id] != 0 && classify_misfit_junk_penalty > 0.0) {
    int min_bottom, max_bottom, min_top, max_top;
    unicharset.get_top_bottom(unichar_id, &min_bottom, &max_bottom,
                              &min_top, &max_top);
    // ...
      tprintf("top=%d, vs [%d, %d], bottom=%d, vs [%d, %d]\n",
              top, min_top, max_top, bottom, min_bottom, max_bottom);
    // ...
    if (top < min_top || top > max_top ||
        bottom < min_bottom || bottom > max_bottom) {
      vertical_penalty = classify_misfit_junk_penalty;
    }
  }
  double result = 1.0 - (cn_corrected + miss_penalty + vertical_penalty);
  // ...
    tprintf("%s: %2.1f%%(CP%2.1f, IM%2.1f + CN%.2f(%d) + MP%2.1f + VP%2.1f)\n",
            unicharset.id_to_unichar(unichar_id),
            // ...
            (1.0 - im_rating) * 100.0,
            (cn_corrected - (1.0 - im_rating)) * 100.0,
            cn_factors[unichar_id],
            miss_penalty * 100.0,
            vertical_penalty * 100.0);
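// Hedged worked example (standalone, invented numbers): how the penalty terms
// that ComputeCorrectedRating sums relate to the final rating.  The
// intermediate values below are illustrative only, not taken from real runs.
#include <cstdio>

int main() {
  double im_rating = 0.90;        // integer matcher rating, higher is better
  double cn_corrected = 0.14;     // (1.0 - im_rating) after the CN correction
  double miss_penalty = 0.02;     // tessedit_class_miss_scale * feature_misses
  double vertical_penalty = 0.0;  // classify_misfit_junk_penalty would apply
                                  // if the blob's top/bottom misfit the unichar
  // The penalties are distances, so the corrected rating is their complement.
  double result = 1.0 - (cn_corrected + miss_penalty + vertical_penalty);
  printf("corrected rating = %.2f\n", result);  // prints 0.84
  return 0;
}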
  // (from Classify::BaselineClassifier)
  if (int_features.empty()) return NULL;
  uinT8* CharNormArray = new uinT8[unicharset.size()];
  ClearCharNormArray(CharNormArray);
  // ...
  PruneClasses(Templates->Templates, int_features.size(), -1, &int_features[0],
               CharNormArray, BaselineCutoffs, &Results->CPResults);
  // ...
  if (matcher_debug_level >= 2 || classify_debug_level > 1)
    // ...
  MasterMatcher(Templates->Templates, int_features.size(), &int_features[0],
                /* ... */,
                Templates->Class, matcher_debug_flags, 0,
                /* ... */);
  delete [] CharNormArray;
  // ...
  return Templates->Class[ClassId]-> /* ... */;
int Classify::CharNormClassifier(TBLOB *blob,
                                 /* ... */) {
  // ...
  static_classifier_->UnicharClassifySample(sample, blob->denorm().pix(), 0,
                                            -1, &unichar_results);
  // ...
  for (int r = 0; r < unichar_results.size(); ++r) {
    AddNewResult(unichar_results[r], adapt_results);
  }
int Classify::CharNormTrainingSample(bool pruner_only,
                                     /* ... */) {
  // ...
  uinT8* char_norm_array = new uinT8[unicharset.size()];
  int num_pruner_classes = MAX(unicharset.size(),
                               PreTrainedTemplates->NumClasses);
  uinT8* pruner_norm_array = new uinT8[num_pruner_classes];
  // ...
  ComputeCharNormArrays(norm_feature, PreTrainedTemplates, char_norm_array,
                        /* ... */);
  // ...
  PruneClasses(PreTrainedTemplates, num_features, keep_this, sample.features(),
               /* ... */,
               shape_table_ != NULL ? &shapetable_cutoffs_[0] : CharNormCutoffs,
               /* ... */);
  // ...
  delete [] pruner_norm_array;
  if (keep_this >= 0) {
    adapt_results->CPResults[0].Class = keep_this;
    // ...
  }
  // ...
    int class_id = adapt_results->CPResults[i].Class;
  // ...
    MasterMatcher(PreTrainedTemplates, num_features, sample.features(),
                  /* ... */,
                  NULL, matcher_debug_flags,
                  /* ... */,
                  blob_box, adapt_results->CPResults, adapt_results);
    // ...
    for (int i = 0; i < adapt_results->match.size(); i++) {
      // ...
    }
    // ...
    results->sort(&UnicharRating::SortDescendingRating);
  // ...
  delete [] char_norm_array;
  delete adapt_results;
  return num_features;
}

  // (from Classify::ClassifyAsNoise)
  float rating = results->BlobLength / matcher_avg_noise_size;
  // ...
  rating /= 1.0 + rating;
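// Hedged standalone example: the noise rating above grows with blob size and
// is squashed into (0, 1) by r / (1 + r).  The value used here for
// matcher_avg_noise_size is invented for illustration.
#include <cstdio>

int main() {
  float blob_length = 30.0f;             // stands in for Results->BlobLength
  float matcher_avg_noise_size = 12.0f;  // invented value
  float rating = blob_length / matcher_avg_noise_size;  // 2.5
  rating /= 1.0f + rating;                               // 2.5 / 3.5 = 0.714
  printf("noise rating = %.3f\n", rating);
  return 0;
}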
void Classify::ConvertMatchesToChoices(const DENORM& denorm, const TBOX& box,
                                       ADAPT_RESULTS *Results,
                                       BLOB_CHOICE_LIST *Choices) {
  assert(Choices != NULL);
  // ...
  BLOB_CHOICE_IT temp_it;
  bool contains_nonfrag = false;
  temp_it.set_to_list(Choices);
  int choices_length = 0;
  // ...
  if (shape_table_ != NULL) {
    max_matches = shape_table_->MaxNumUnichars() * 2;
    // ...
  }
  // ...
  for (int i = 0; i < Results->match.size(); i++) {
    // ...
    bool adapted = result.adapted;
    bool current_is_frag = (unicharset.get_fragment(result.unichar_id) != NULL);
    if (temp_it.length()+1 == max_matches &&
        !contains_nonfrag && current_is_frag) {
      // ...
    }
    // ...
    Rating = Certainty = (1.0f - result.rating);
    Rating *= rating_scale * Results->BlobLength;
    Certainty *= -(getDict().certainty_scale);
    // ...
    if (Certainty > best_certainty) {
      best_certainty = MIN(Certainty, classify_adapted_pruning_threshold);
    } else if (adapted &&
               Certainty / classify_adapted_pruning_factor < best_certainty) {
      // ...
    }
    // ...
    float min_xheight, max_xheight, yshift;
    // ...(&min_xheight, &max_xheight, &yshift);
    // ...(min_xheight, max_xheight, yshift, ...)
    // ...
    temp_it.add_to_end(choice);
    contains_nonfrag |= !current_is_frag;
    // ...
    if (choices_length >= max_matches) break;
  }
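// Hedged illustration (standalone, invented numbers): the conversion above
// turns a matcher rating in [0, 1] into the Rating/Certainty pair stored in a
// BLOB_CHOICE.  rating_scale and certainty_scale stand in for the
// corresponding parameters; no real default values are assumed.
#include <cstdio>

int main() {
  float result_rating = 0.92f;    // matcher rating, higher is better
  int blob_length = 20;           // stands in for Results->BlobLength
  float rating_scale = 1.5f;      // invented value
  float certainty_scale = 20.0f;  // invented value

  float Rating, Certainty;
  Rating = Certainty = (1.0f - result_rating);  // convert to a "badness"
  Rating *= rating_scale * blob_length;         // scale by blob length
  Certainty *= -certainty_scale;                // certainties are <= 0
  printf("Rating=%.3f Certainty=%.3f\n", Rating, Certainty);  // 2.400 -1.600
  return 0;
}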
#ifndef GRAPHICS_DISABLED
void Classify::DebugAdaptiveClassifier(TBLOB *blob,
                                       /* ... */) {
  // ...
  if (static_classifier_ == NULL) return;
  // ...
  if (sample == NULL) return;
  static_classifier_->DebugDisplay(*sample, blob->denorm().pix(),
                                   /* ... */);
  // (from Classify::DoAdaptiveMatch)
  if (sample == NULL) return;
  // ...
  if (AdaptedTemplates->NumPermClasses < matcher_permanent_classes_min ||
      // ...
    CharNormClassifier(Blob, *sample, Results);
  // ...
    Ambiguities = BaselineClassifier(Blob, bl_features, fx_info,
                                     AdaptedTemplates, Results);
    // ...
    if ((/* ... */
             matcher_reliable_adaptive_result) &&
         !tess_bn_matching) ||
        /* ... */) {
      CharNormClassifier(Blob, *sample, Results);
    } else if (Ambiguities && *Ambiguities >= 0 && !tess_bn_matching) {
      AmbigClassifier(bl_features, fx_info, Blob,
                      PreTrainedTemplates,
                      AdaptedTemplates->Class,
                      /* ... */);
    }
  // ...
    ClassifyAsNoise(Results);
  // (from Classify::GetAmbiguities)
  if (sample == NULL) {
    // ...
  }
  // ...
  CharNormClassifier(Blob, *sample, Results);
  // ...
  RemoveBadMatches(Results);
  Results->match.sort(&UnicharRating::SortDescendingRating);
  // ...
  if (/* ... */
      Results->match[0].unichar_id != CorrectClass)) {
    for (i = 0; i < Results->match.size(); i++)
      Ambiguities[i] = Results->match[i].unichar_id;
    Ambiguities[i] = -1;
  } else {
    Ambiguities[0] = -1;
  }
bool Classify::LooksLikeGarbage(TBLOB *blob) {
  BLOB_CHOICE_LIST *ratings = new BLOB_CHOICE_LIST();
  AdaptiveClassifier(blob, ratings);
  BLOB_CHOICE_IT ratings_it(ratings);
  const UNICHARSET &unicharset = getDict().getUnicharset();
  if (classify_debug_character_fragments) {
    // ...(ratings, unicharset);
  }
  for (ratings_it.mark_cycle_pt(); !ratings_it.cycled_list();
       ratings_it.forward()) {
    if (unicharset.get_fragment(ratings_it.data()->unichar_id()) != NULL) {
      // ...
    }
    float certainty = ratings_it.data()->certainty();
    // ...
    return certainty <
        classify_character_fragments_garbage_certainty_threshold;
  // (from Classify::GetCharNormFeature)
    // ...
    uinT8* pruner_norm_array,
    uinT8* char_norm_array) {
  // ...
  ComputeCharNormArrays(norm_feature, templates, char_norm_array,
                        pruner_norm_array);
  // ...

void Classify::ComputeCharNormArrays(/* ... */,
                                     uinT8* char_norm_array,
                                     uinT8* pruner_array) {
  ComputeIntCharNormArray(*norm_feature, char_norm_array);
  if (pruner_array != NULL) {
    if (shape_table_ == NULL) {
      ComputeIntCharNormArray(*norm_feature, pruner_array);
    } else {
      // ...(templates->NumClasses * sizeof(pruner_array[0]));
      // ...
      for (int id = 0; id < templates->NumClasses; ++id) {
        // ...
        const FontSet &fs = fontset_table_.get(font_set_id);
        for (int config = 0; config < fs.size; ++config) {
          const Shape& shape = shape_table_->GetShape(fs.configs[config]);
          for (int c = 0; c < shape.size(); ++c) {
            if (char_norm_array[shape[c].unichar_id] < pruner_array[id])
              pruner_array[id] = char_norm_array[shape[c].unichar_id];
          }
        }
      }
    }
  }
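// Hedged standalone sketch of the loop above: with a shape table, each class's
// pruner entry ends up as the smallest (best) char-norm value over the
// unichars covered by that class's shapes.  The arrays and ids are invented.
#include <algorithm>
#include <cstdint>
#include <cstdio>

int main() {
  // char_norm_array is indexed by unichar id; smaller means a better size fit.
  uint8_t char_norm_array[4] = {30, 12, 200, 45};
  int shape_unichars[2] = {1, 3};    // unichar ids covered by one class's shape
  uint8_t pruner_entry = UINT8_MAX;  // start from the worst possible value
  for (int c = 0; c < 2; ++c) {
    pruner_entry = std::min(pruner_entry, char_norm_array[shape_unichars[c]]);
  }
  printf("pruner_entry = %d\n", pruner_entry);  // prints 12
  return 0;
}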
  // (from Classify::MakeNewTemporaryConfig)
  int MaxProtoId, OldMaxProtoId;
  // ...
  if (classify_learning_debug_level >= 3)
    // ...
  Class = Templates->Class[ClassId];
  // ...
    ++NumAdaptationsFailed;
    if (classify_learning_debug_level >= 1)
      cprintf("Cannot make new temporary config: maximum number exceeded.\n");
  // ...
  NumOldProtos = im_.FindGoodProtos(IClass, AllProtosOn, AllConfigsOff,
                                    BlobLength, NumFeatures, Features,
                                    OldProtos, classify_adapt_proto_threshold,
                                    /* ... */);
  // ...
  for (i = 0; i < NumOldProtos; i++)
    SET_BIT(TempProtoMask, OldProtos[i]);
  // ...
  NumBadFeatures = im_.FindBadFeatures(IClass, TempProtoMask, AllConfigsOn,
                                       BlobLength, NumFeatures, Features,
                                       /* ... */,
                                       classify_adapt_feature_threshold,
                                       /* ... */);
  // ...
  MaxProtoId = MakeNewTempProtos(FloatFeatures, NumBadFeatures, BadFeatures,
                                 IClass, Class, TempProtoMask);
  // ...
    ++NumAdaptationsFailed;
    if (classify_learning_debug_level >= 1)
      cprintf("Cannot make new temp protos: maximum number exceeded.\n");
  // ...
  if (classify_learning_debug_level >= 1)
    cprintf("Making new temp config %d fontinfo id %d"
            " using %d old and %d new protos.\n",
            /* ... */,
            NumOldProtos, MaxProtoId - OldMaxProtoId);
  // (from Classify::MakeNewTempProtos)
  for (ProtoStart = BadFeat, LastBad = ProtoStart + NumBadFeat;
       ProtoStart < LastBad; ProtoStart = ProtoEnd) {
    F1 = Features->Features[*ProtoStart];
    // ...
    for (ProtoEnd = ProtoStart + 1,
         /* ... */) {
      F2 = Features->Features[*ProtoEnd];
      // ...
      AngleDelta = fabs(A1 - A2);
      if (AngleDelta > 0.5)
        AngleDelta = 1.0 - AngleDelta;

      if (AngleDelta > matcher_clustering_max_angle_delta ||
          fabs(X1 - X2) > SegmentLength ||
          fabs(Y1 - Y2) > SegmentLength)
        // ...
    }
    // ...
    F2 = Features->Features[*(ProtoEnd - 1)];
    // ...
    Proto = &(TempProto->Proto);
    // ...
    Proto->Length = SegmentLength;
    // ...
    Proto->X = (X1 + X2) / 2.0;
    // ...
    ConvertProto(Proto, Pid, IClass);
    // ...(call elided; last argument: classify_learning_debug_level >= 2)
  }
  // (from Classify::MakePermanent)
  Class = Templates->Class[ClassId];
  // ...
  Ambigs = GetAmbiguities(Blob, ClassId);
  // ...
  if (classify_learning_debug_level >= 1) {
    tprintf("Making config %d for %s (ClassId %d) permanent:"
            " fontinfo id %d, ambiguities '",
            ConfigId, getDict().getUnicharset().debug_str(ClassId).string(),
            /* ... */);
    for (/* ... */;
         *AmbigsPointer >= 0; ++AmbigsPointer)
      tprintf("%s", unicharset.id_to_unichar(*AmbigsPointer));
    // ...
  }
  // (from Classify::PrintAdaptiveMatchResults)
  for (int i = 0; i < results.match.size(); ++i) {
    tprintf("%s ", unicharset.debug_str(results.match[i].unichar_id).string());
    results.match[i].Print();
  }
  // (from Classify::RemoveBadMatches)
  static const char* romans = "i v x I V X";
  // ...
  BadMatchThreshold = Results->best_rating - matcher_bad_match_pad;
  // ...
  if (classify_bln_numeric_mode) {
    UNICHAR_ID unichar_id_one = unicharset.contains_unichar("1") ?
        unicharset.unichar_to_id("1") : -1;
    UNICHAR_ID unichar_id_zero = unicharset.contains_unichar("0") ?
        unicharset.unichar_to_id("0") : -1;
    float scored_one = ScoredUnichar(unichar_id_one, *Results);
    float scored_zero = ScoredUnichar(unichar_id_zero, *Results);
    // ...
    for (Next = NextGood = 0; Next < Results->match.size(); Next++) {
      // ...
      if (match.rating >= BadMatchThreshold) {
        if (!unicharset.get_isalpha(match.unichar_id) ||
            strstr(romans,
                   unicharset.id_to_unichar(match.unichar_id)) != NULL) {
          // ...
        } else if (unicharset.eq(match.unichar_id, "l") &&
                   scored_one < BadMatchThreshold) {
          Results->match[Next].unichar_id = unichar_id_one;
        } else if (unicharset.eq(match.unichar_id, "O") &&
                   scored_zero < BadMatchThreshold) {
          Results->match[Next].unichar_id = unichar_id_zero;
        } else {
          Results->match[Next].unichar_id = INVALID_UNICHAR_ID;
        }
        if (Results->match[Next].unichar_id != INVALID_UNICHAR_ID) {
          if (NextGood == Next) {
            // ...
          } else {
            Results->match[NextGood++] = Results->match[Next];
          }
        }
      }
    }
  } else {
    for (Next = NextGood = 0; Next < Results->match.size(); Next++) {
      if (Results->match[Next].rating >= BadMatchThreshold) {
        if (NextGood == Next) {
          // ...
        } else {
          Results->match[NextGood++] = Results->match[Next];
        }
      }
    }
  }
  // (from Classify::RemoveExtraPuncs)
  static char punc_chars[] = ". , ; : / ` ~ ' - = \\ | \" ! _ ^";
  static char digit_chars[] = "0 1 2 3 4 5 6 7 8 9";
  // ...
  for (Next = NextGood = 0; Next < Results->match.size(); Next++) {
    // ...
    if (strstr(punc_chars,
               unicharset.id_to_unichar(match.unichar_id)) != NULL) {
      if (punc_count >= 2)
        // ...
    } else {
      if (strstr(digit_chars,
                 unicharset.id_to_unichar(match.unichar_id)) != NULL) {
        if (digit_count >= 1)
          // ...
      }
    }
    // ...
    if (NextGood == Next) {
      // ...
  // (from Classify::SetAdaptiveThreshold)
  Threshold = (Threshold == matcher_good_threshold) ? 0.9 : (1.0 - Threshold);
  classify_adapt_proto_threshold.set_value(
      ClipToRange<int>(255 * Threshold, 0, 255));
  classify_adapt_feature_threshold.set_value(
      ClipToRange<int>(255 * Threshold, 0, 255));
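// Hedged standalone sketch: the mapping above takes a floating-point adaption
// threshold into the 0..255 integer range used by the integer matcher.
// ClipToRange is re-sketched locally so this compiles on its own, and the
// matcher_good_threshold value is invented for illustration.
#include <cstdio>

template <typename T>
static T ClipToRangeSketch(T x, T lower, T upper) {
  return x < lower ? lower : (x > upper ? upper : x);
}

int main() {
  float matcher_good_threshold = 0.125f;  // invented value
  float Threshold = 0.3f;                 // a per-character adaption threshold
  Threshold = (Threshold == matcher_good_threshold) ? 0.9f : (1.0f - Threshold);
  int int_threshold =
      ClipToRangeSketch<int>(static_cast<int>(255 * Threshold), 0, 255);
  printf("integer threshold = %d\n", int_threshold);  // prints 178
  return 0;
}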
void Classify::ShowBestMatchFor(int shape_id,
                                /* ... */) {
#ifndef GRAPHICS_DISABLED
  // ...
    tprintf("No built-in templates for class/shape %d\n", shape_id);
  // ...
  if (num_features <= 0) {
    tprintf("Illegal blob (char norm features)!\n");
    // ...
  }
  // ...
  classify_norm_method.set_value(character);
  // ...
  im_.Match(/* ... */,
            AllProtosOn, AllConfigsOn,
            num_features, features, &cn_result,
            classify_adapt_feature_threshold, NO_DEBUG,
            matcher_debug_separate_windows);
  // ...
  config_mask = 1 << cn_result.config;
  // ...
  tprintf("Static Shape ID: %d\n", shape_id);
  // ...
  im_.Match(/* ... */,
            AllProtosOn, &config_mask,
            num_features, features, &cn_result,
            classify_adapt_feature_threshold,
            matcher_debug_flags,
            matcher_debug_separate_windows);
#endif  // GRAPHICS_DISABLED
}

STRING Classify::ClassIDToDebugStr(/* ... */,
                                   int class_id, int config_id) const {
  // ...
  if (templates == PreTrainedTemplates && shape_table_ != NULL) {
    int shape_id = ClassAndConfigIDToFontOrShapeID(class_id, config_id);
    class_string = shape_table_->DebugStr(shape_id);
  } else {
    class_string = unicharset.debug_str(class_id);
  }
  return class_string;
}
int Classify::ClassAndConfigIDToFontOrShapeID(int class_id,
                                              int int_result_config) const {
  int font_set_id = PreTrainedTemplates->Class[class_id]->font_set_id;
  // ...
  if (font_set_id < 0)
    return kBlankFontinfoId;
  const FontSet &fs = fontset_table_.get(font_set_id);
  // ...
  return fs.configs[int_result_config];
}
int Classify::ShapeIDToClassID(int shape_id) const {
  for (int id = 0; id < PreTrainedTemplates->NumClasses; ++id) {
    int font_set_id = PreTrainedTemplates->Class[id]->font_set_id;
    // ...
    const FontSet &fs = fontset_table_.get(font_set_id);
    for (int config = 0; config < fs.size; ++config) {
      if (fs.configs[config] == shape_id)
        // ...
    }
  }
  tprintf("Shape %d not found\n", shape_id);
  // ...
}
  // (from Classify::TempConfigReliable)
  if (classify_learning_debug_level >= 1) {
    tprintf("NumTimesSeen for config of %s is %d\n",
            getDict().getUnicharset().debug_str(class_id).string(),
            /* ... */);
  }
  if (config->NumTimesSeen >= matcher_sufficient_examples_for_prototyping) {
    // ...
  } else if (config->NumTimesSeen < matcher_min_examples_for_prototyping) {
    // ...
  } else if (use_ambigs_for_adaption) {
    // ...
        getDict().getUnicharAmbigs().AmbigsForAdaption(class_id);
    int ambigs_size = (ambigs == NULL) ? 0 : ambigs->size();
    for (int ambig = 0; ambig < ambigs_size; ++ambig) {
      ADAPT_CLASS ambig_class = AdaptedTemplates->Class[(*ambigs)[ambig]];
      assert(ambig_class != NULL);
      // ...
          matcher_min_examples_for_prototyping) {
        if (classify_learning_debug_level >= 1) {
          tprintf("Ambig %s has not been seen enough times,"
                  " not making config for %s permanent\n",
                  getDict().getUnicharset().debug_str(
                      (*ambigs)[ambig]).string(),
                  getDict().getUnicharset().debug_str(class_id).string());
  // (from Classify::UpdateAmbigsGroup)
  // ...
      getDict().getUnicharAmbigs().ReverseAmbigsForAdaption(class_id);
  int ambigs_size = (ambigs == NULL) ? 0 : ambigs->size();
  if (classify_learning_debug_level >= 1) {
    tprintf("Running UpdateAmbigsGroup for %s class_id=%d\n",
            getDict().getUnicharset().debug_str(class_id).string(), class_id);
  }
  for (int ambig = 0; ambig < ambigs_size; ++ambig) {
    CLASS_ID ambig_class_id = (*ambigs)[ambig];
    const ADAPT_CLASS ambigs_class = AdaptedTemplates->Class[ambig_class_id];
    // ...
      TEMP_CONFIG config =
          TempConfigFor(AdaptedTemplates->Class[ambig_class_id], cfg);
      if (config != NULL && TempConfigReliable(ambig_class_id, config)) {
        if (classify_learning_debug_level >= 1) {
          tprintf("Making config %d of %s permanent\n", cfg,
                  getDict().getUnicharset().debug_str(
                      ambig_class_id).string());
        }
        MakePermanent(AdaptedTemplates, ambig_class_id, cfg, Blob);