Fix some typos:
hypennation_decompounder -> hyphenation_decompounder
only_longest_max -> only_longest_match
commit 37f08ea8b8
parent b653d149d1

@@ -393,7 +393,7 @@ public class AnalysisModule extends AbstractModule {
         tokenFiltersBindings.processTokenFilter("pattern_replace", PatternReplaceTokenFilterFactory.class);
         tokenFiltersBindings.processTokenFilter("phonetic", PhoneticTokenFilterFactory.class);
         tokenFiltersBindings.processTokenFilter("dictionary_decompounder", DictionaryCompoundWordTokenFilterFactory.class);
-        tokenFiltersBindings.processTokenFilter("hypennation_decompounder", HyphenationCompoundWordTokenFilterFactory.class);
+        tokenFiltersBindings.processTokenFilter("hyphenation_decompounder", HyphenationCompoundWordTokenFilterFactory.class);

         tokenFiltersBindings.processTokenFilter("arabic_stem", ArabicStemTokenFilterFactory.class);
         tokenFiltersBindings.processTokenFilter("brazilian_stem", BrazilianStemTokenFilterFactory.class);
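The string passed to processTokenFilter is the name that index analysis settings must use to reach the factory, so a misspelled registration leaves the filter unreachable under its documented name. A minimal sketch of that name-to-factory lookup; the TokenFilterRegistry class and lookup method below are hypothetical stand-ins, not the actual Elasticsearch binding code:

    import java.util.HashMap;
    import java.util.Map;

    // Hypothetical stand-in for the name -> factory binding done in
    // AnalysisModule; shows why the registered string must exactly match
    // the name referenced from index settings.
    public class TokenFilterRegistry {

        private final Map<String, Class<?>> factories = new HashMap<>();

        public void processTokenFilter(String name, Class<?> factory) {
            factories.put(name, factory);
        }

        public Class<?> lookup(String name) {
            Class<?> factory = factories.get(name);
            if (factory == null) {
                // What a user hit before this commit: "hyphenation_decompounder"
                // found no entry, because only the misspelled
                // "hypennation_decompounder" had been registered.
                throw new IllegalArgumentException("failed to find token filter type [" + name + "]");
            }
            return factory;
        }
    }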
@@ -52,7 +52,7 @@ public abstract class AbstractCompoundWordTokenFilterFactory extends AbstractTok
         minWordSize = settings.getAsInt("min_word_size", CompoundWordTokenFilterBase.DEFAULT_MIN_WORD_SIZE);
         minSubwordSize = settings.getAsInt("min_subword_size", CompoundWordTokenFilterBase.DEFAULT_MIN_SUBWORD_SIZE);
         maxSubwordSize = settings.getAsInt("max_subword_size", CompoundWordTokenFilterBase.DEFAULT_MAX_SUBWORD_SIZE);
-        onlyLongestMatch = settings.getAsBoolean("only_longest_max", false);
+        onlyLongestMatch = settings.getAsBoolean("only_longest_match", false);
         wordList = Analysis.getWordSet(env, settings, "word_list");
         if (wordList == null) {
             throw new ElasticSearchIllegalArgumentException("word_list must be provided for [" + name + "], either as a path to a file, or directly");
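Before this fix, the factory read the settings key only_longest_max, so a user who configured only_longest_match: true was silently given the false default rather than an error. A minimal sketch of that failure mode, using a map-backed stand-in for Settings.getAsBoolean (hypothetical, not the real Settings API):

    import java.util.HashMap;
    import java.util.Map;

    public class SettingsTypoDemo {

        // Stand-in for Settings.getAsBoolean: returns the default when the
        // key is absent, which is exactly why the misspelled key failed
        // silently instead of loudly.
        static boolean getAsBoolean(Map<String, String> settings, String key, boolean defaultValue) {
            String value = settings.get(key);
            return value == null ? defaultValue : Boolean.parseBoolean(value);
        }

        public static void main(String[] args) {
            Map<String, String> settings = new HashMap<>();
            settings.put("only_longest_match", "true"); // what a user would write

            // Before the fix: wrong key, falls back to the default.
            System.out.println(getAsBoolean(settings, "only_longest_max", false));   // false

            // After the fix: matching key, the user's value is honored.
            System.out.println(getAsBoolean(settings, "only_longest_match", false)); // true
        }
    }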