Handle some deprecation warnings

Suppress lots of them with comments. Fix a few. Remove lots of the pre-built
analysis support that only existed for pre-2.0 indexes.
Nik Everett 2016-01-07 22:45:05 -05:00
parent dcd8a8207f
commit d8526f4d00
6 changed files with 38 additions and 135 deletions
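The "suppress with comments" half of this change follows one pattern: keep @SuppressWarnings("deprecation") on the smallest member that has to touch the deprecated API and explain in a trailing comment why the call must stay. A minimal illustrative sketch of that style (not code from this commit; the class and method names are made up):

    import java.util.Arrays;
    import java.util.zip.CRC32;

    class LegacyDigests {
        /** Stand-in for an old checksum API kept only so pre-2.0 on-disk formats stay readable. */
        @Deprecated
        static String legacyDigest(byte[] bytes) {
            return Integer.toHexString(Arrays.hashCode(bytes));
        }

        static String crc32Digest(byte[] bytes) {
            CRC32 crc = new CRC32();
            crc.update(bytes);
            return Long.toHexString(crc.getValue());
        }

        // Suppress at the narrowest scope that touches the deprecated API, and say why it is still needed.
        @SuppressWarnings("deprecation") // legacy files only carry the old digest format
        static String digestFor(byte[] bytes, boolean legacyFile) {
            return legacyFile ? legacyDigest(bytes) : crc32Digest(bytes);
        }
    }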

NGramTokenizerFactory.java

@@ -20,9 +20,7 @@
 package org.elasticsearch.index.analysis;

 import org.apache.lucene.analysis.Tokenizer;
-import org.apache.lucene.analysis.ngram.Lucene43NGramTokenizer;
 import org.apache.lucene.analysis.ngram.NGramTokenizer;
-import org.apache.lucene.util.Version;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.env.Environment;
 import org.elasticsearch.index.IndexSettings;
@@ -43,7 +41,6 @@ public class NGramTokenizerFactory extends AbstractTokenizerFactory {
     private final int minGram;
     private final int maxGram;
     private final CharMatcher matcher;
-    private org.elasticsearch.Version esVersion;

     static final Map<String, CharMatcher> MATCHERS;
@@ -92,30 +89,19 @@ public class NGramTokenizerFactory extends AbstractTokenizerFactory {
         this.minGram = settings.getAsInt("min_gram", NGramTokenizer.DEFAULT_MIN_NGRAM_SIZE);
         this.maxGram = settings.getAsInt("max_gram", NGramTokenizer.DEFAULT_MAX_NGRAM_SIZE);
         this.matcher = parseTokenChars(settings.getAsArray("token_chars"));
-        this.esVersion = indexSettings.getIndexVersionCreated();
     }

-    @SuppressWarnings("deprecation")
     @Override
     public Tokenizer create() {
-        if (version.onOrAfter(Version.LUCENE_4_3) && esVersion.onOrAfter(org.elasticsearch.Version.V_0_90_2)) {
-            /*
-             * We added this in 0.90.2 but 0.90.1 used LUCENE_43 already so we can not rely on the lucene version.
-             * Yet if somebody uses 0.90.2 or higher with a prev. lucene version we should also use the deprecated version.
-             */
-            final Version version = this.version == Version.LUCENE_4_3 ? Version.LUCENE_4_4 : this.version; // always use 4.4 or higher
-            if (matcher == null) {
-                return new NGramTokenizer(minGram, maxGram);
-            } else {
-                return new NGramTokenizer(minGram, maxGram) {
-                    @Override
-                    protected boolean isTokenChar(int chr) {
-                        return matcher.isTokenChar(chr);
-                    }
-                };
-            }
+        if (matcher == null) {
+            return new NGramTokenizer(minGram, maxGram);
         } else {
-            return new Lucene43NGramTokenizer(minGram, maxGram);
+            return new NGramTokenizer(minGram, maxGram) {
+                @Override
+                protected boolean isTokenChar(int chr) {
+                    return matcher.isTokenChar(chr);
+                }
+            };
         }
     }
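The simplified create() above always builds a current NGramTokenizer and, when a token_chars matcher is configured, narrows which characters may appear inside a gram by overriding isTokenChar. A standalone analogue of that hook (illustrative sketch only, assuming Lucene 5.x on the classpath; the class and method names are made up) that keeps just letters and digits:

    import org.apache.lucene.analysis.Tokenizer;
    import org.apache.lucene.analysis.ngram.NGramTokenizer;

    class LettersAndDigitsNGrams {
        // Characters rejected by isTokenChar split the input before grams are emitted,
        // which is what the factory's CharMatcher does for the token_chars setting.
        static Tokenizer create(int minGram, int maxGram) {
            return new NGramTokenizer(minGram, maxGram) {
                @Override
                protected boolean isTokenChar(int chr) {
                    return Character.isLetterOrDigit(chr);
                }
            };
        }
    }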

QueryBuilders.java

@@ -604,14 +604,14 @@ public abstract class QueryBuilders {
      * Facilitates creating template query requests using an inline script
      */
     public static TemplateQueryBuilder templateQuery(String template, Map<String, Object> vars) {
-        return new TemplateQueryBuilder(template, vars);
+        return new TemplateQueryBuilder(new Template(template, ScriptService.ScriptType.INLINE, null, null, vars));
     }

     /**
      * Facilitates creating template query requests
      */
     public static TemplateQueryBuilder templateQuery(String template, ScriptService.ScriptType templateType, Map<String, Object> vars) {
-        return new TemplateQueryBuilder(template, templateType, vars);
+        return new TemplateQueryBuilder(new Template(template, templateType, null, null, vars));
     }

     /**
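Callers are unaffected by the change above: both overloads still exist and now build the Template themselves. A hypothetical caller-side sketch (the field and parameter names are made up; assumes java.util.* and org.elasticsearch.index.query.* imports):

    Map<String, Object> params = new HashMap<>();
    params.put("param_gender", "male");
    QueryBuilder qb = QueryBuilders.templateQuery(
            "{\"match\": {\"gender\": \"{{param_gender}}\"}}", params);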

Store.java

@@ -118,6 +118,7 @@ import static java.util.Collections.unmodifiableMap;
  * </pre>
  */
 public class Store extends AbstractIndexShardComponent implements Closeable, RefCounted {
+    private static final Version FIRST_LUCENE_CHECKSUM_VERSION = Version.LUCENE_4_8_0;

     static final String CODEC = "store";
     static final int VERSION_WRITE_THROWABLE= 2; // we write throwable since 2.0
@@ -466,7 +467,7 @@ public class Store extends AbstractIndexShardComponent implements Closeable, RefCounted {
                 output = new LegacyVerification.LengthVerifyingIndexOutput(output, metadata.length());
             } else {
                 assert metadata.writtenBy() != null;
-                assert metadata.writtenBy().onOrAfter(Version.LUCENE_4_8);
+                assert metadata.writtenBy().onOrAfter(FIRST_LUCENE_CHECKSUM_VERSION);
                 output = new LuceneVerifyingIndexOutput(metadata, output);
             }
             success = true;
@@ -490,7 +491,7 @@ public class Store extends AbstractIndexShardComponent implements Closeable, RefCounted {
             return directory().openInput(filename, context);
         }
         assert metadata.writtenBy() != null;
-        assert metadata.writtenBy().onOrAfter(Version.LUCENE_4_8_0);
+        assert metadata.writtenBy().onOrAfter(FIRST_LUCENE_CHECKSUM_VERSION);
         return new VerifyingIndexInput(directory().openInput(filename, context));
     }
@@ -518,7 +519,7 @@ public class Store extends AbstractIndexShardComponent implements Closeable, RefCounted {
         if (input.length() != md.length()) { // first check the length no matter how old this file is
             throw new CorruptIndexException("expected length=" + md.length() + " != actual length: " + input.length() + " : file truncated?", input);
         }
-        if (md.writtenBy() != null && md.writtenBy().onOrAfter(Version.LUCENE_4_8_0)) {
+        if (md.writtenBy() != null && md.writtenBy().onOrAfter(FIRST_LUCENE_CHECKSUM_VERSION)) {
             // throw exception if the file is corrupt
             String checksum = Store.digestToString(CodecUtil.checksumEntireFile(input));
             // throw exception if metadata is inconsistent
@@ -766,7 +767,6 @@ public class Store extends AbstractIndexShardComponent implements Closeable, RefCounted {
     */
    public final static class MetadataSnapshot implements Iterable<StoreFileMetaData>, Writeable<MetadataSnapshot> {
        private static final ESLogger logger = Loggers.getLogger(MetadataSnapshot.class);
-       private static final Version FIRST_LUCENE_CHECKSUM_VERSION = Version.LUCENE_4_8;

        private final Map<String, StoreFileMetaData> metadata;
@@ -843,6 +843,7 @@ public class Store extends AbstractIndexShardComponent implements Closeable, RefCounted {
            final SegmentInfos segmentCommitInfos = Store.readSegmentsInfo(commit, directory);
            numDocs = Lucene.getNumDocs(segmentCommitInfos);
            commitUserDataBuilder.putAll(segmentCommitInfos.getUserData());
+           @SuppressWarnings("deprecation")
            Version maxVersion = Version.LUCENE_4_0; // we don't know which version was used to write so we take the max version.
            for (SegmentCommitInfo info : segmentCommitInfos) {
                final Version version = info.info.getVersion();
@@ -907,6 +908,7 @@ public class Store extends AbstractIndexShardComponent implements Closeable, RefCounted {
         * @param directory the directory to read checksums from
         * @return a map of file checksums and the checksum file version
         */
+       @SuppressWarnings("deprecation") // Legacy checksum needs legacy methods
        static Tuple<Map<String, String>, Long> readLegacyChecksums(Directory directory) throws IOException {
            synchronized (directory) {
                long lastFound = -1;
@@ -922,10 +924,10 @@ public class Store extends AbstractIndexShardComponent implements Closeable, RefCounted {
                if (lastFound > -1) {
                    try (IndexInput indexInput = directory.openInput(CHECKSUMS_PREFIX + lastFound, IOContext.READONCE)) {
                        indexInput.readInt(); // version
-                       return new Tuple(indexInput.readStringStringMap(), lastFound);
+                       return new Tuple<>(indexInput.readStringStringMap(), lastFound);
                    }
                }
-               return new Tuple(new HashMap<>(), -1l);
+               return new Tuple<>(new HashMap<>(), -1l);
            }
        }
@@ -1243,6 +1245,7 @@ public class Store extends AbstractIndexShardComponent implements Closeable, RefCounted {
            }
        }

+       @SuppressWarnings("deprecation") // Legacy checksum uses legacy methods
        synchronized void writeChecksums(Directory directory, Map<String, String> checksums, long lastVersion) throws IOException {
            // Make sure if clock goes backwards we still move version forwards:
            long nextVersion = Math.max(lastVersion+1, System.currentTimeMillis());
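The version bump in writeChecksums above is worth spelling out: taking the max of lastVersion + 1 and the wall clock keeps the checksum file version strictly increasing even when the system clock jumps backwards. A tiny worked example with hypothetical values:

    long lastVersion = 1_452_000_000_000L; // version of the previous checksum file
    long wallClock   = 1_451_000_000_000L; // clock has gone backwards since it was written
    long nextVersion = Math.max(lastVersion + 1, wallClock); // -> 1_452_000_000_001, still moves forward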

PreBuiltAnalyzers.java

@@ -73,15 +73,10 @@ import java.util.Locale;
  */
 public enum PreBuiltAnalyzers {

-    STANDARD(CachingStrategy.ELASTICSEARCH) { // we don't do stopwords anymore from 1.0Beta on
+    STANDARD(CachingStrategy.ELASTICSEARCH) {
         @Override
         protected Analyzer create(Version version) {
-            final Analyzer a;
-            if (version.onOrAfter(Version.V_1_0_0_Beta1)) {
-                a = new StandardAnalyzer(CharArraySet.EMPTY_SET);
-            } else {
-                a = new StandardAnalyzer();
-            }
+            final Analyzer a = new StandardAnalyzer(CharArraySet.EMPTY_SET);
             a.setVersion(version.luceneVersion);
             return a;
         }
@@ -151,22 +146,14 @@ public enum PreBuiltAnalyzers {
     PATTERN(CachingStrategy.ELASTICSEARCH) {
         @Override
         protected Analyzer create(Version version) {
-            if (version.onOrAfter(Version.V_1_0_0_RC1)) {
-                return new PatternAnalyzer(Regex.compile("\\W+" /*PatternAnalyzer.NON_WORD_PATTERN*/, null), true, CharArraySet.EMPTY_SET);
-            }
-            return new PatternAnalyzer(Regex.compile("\\W+" /*PatternAnalyzer.NON_WORD_PATTERN*/, null), true, StopAnalyzer.ENGLISH_STOP_WORDS_SET);
+            return new PatternAnalyzer(Regex.compile("\\W+" /*PatternAnalyzer.NON_WORD_PATTERN*/, null), true, CharArraySet.EMPTY_SET);
         }
     },

     STANDARD_HTML_STRIP(CachingStrategy.ELASTICSEARCH) {
         @Override
         protected Analyzer create(Version version) {
-            final Analyzer analyzer;
-            if (version.onOrAfter(Version.V_1_0_0_RC1)) {
-                analyzer = new StandardHtmlStripAnalyzer(CharArraySet.EMPTY_SET);
-            } else {
-                analyzer = new StandardHtmlStripAnalyzer();
-            }
+            final Analyzer analyzer = new StandardHtmlStripAnalyzer(CharArraySet.EMPTY_SET);
             analyzer.setVersion(version.luceneVersion);
             return analyzer;
         }

PreBuiltTokenFilters.java

@@ -28,7 +28,6 @@ import org.apache.lucene.analysis.ckb.SoraniNormalizationFilter;
 import org.apache.lucene.analysis.commongrams.CommonGramsFilter;
 import org.apache.lucene.analysis.core.DecimalDigitFilter;
 import org.apache.lucene.analysis.core.LowerCaseFilter;
-import org.apache.lucene.analysis.core.Lucene43StopFilter;
 import org.apache.lucene.analysis.core.StopAnalyzer;
 import org.apache.lucene.analysis.core.StopFilter;
 import org.apache.lucene.analysis.core.UpperCaseFilter;
@@ -45,9 +44,6 @@ import org.apache.lucene.analysis.miscellaneous.ASCIIFoldingFilter;
 import org.apache.lucene.analysis.miscellaneous.KeywordRepeatFilter;
 import org.apache.lucene.analysis.miscellaneous.LengthFilter;
 import org.apache.lucene.analysis.miscellaneous.LimitTokenCountFilter;
-import org.apache.lucene.analysis.miscellaneous.Lucene43LengthFilter;
-import org.apache.lucene.analysis.miscellaneous.Lucene43TrimFilter;
-import org.apache.lucene.analysis.miscellaneous.Lucene47WordDelimiterFilter;
 import org.apache.lucene.analysis.miscellaneous.ScandinavianFoldingFilter;
 import org.apache.lucene.analysis.miscellaneous.ScandinavianNormalizationFilter;
 import org.apache.lucene.analysis.miscellaneous.TrimFilter;
@@ -55,8 +51,6 @@ import org.apache.lucene.analysis.miscellaneous.TruncateTokenFilter;
 import org.apache.lucene.analysis.miscellaneous.UniqueTokenFilter;
 import org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter;
 import org.apache.lucene.analysis.ngram.EdgeNGramTokenFilter;
-import org.apache.lucene.analysis.ngram.Lucene43EdgeNGramTokenFilter;
-import org.apache.lucene.analysis.ngram.Lucene43NGramTokenFilter;
 import org.apache.lucene.analysis.ngram.NGramTokenFilter;
 import org.apache.lucene.analysis.payloads.DelimitedPayloadTokenFilter;
 import org.apache.lucene.analysis.payloads.TypeAsPayloadTokenFilter;
@@ -86,49 +80,26 @@ public enum PreBuiltTokenFilters {
     WORD_DELIMITER(CachingStrategy.ONE) {
         @Override
         public TokenStream create(TokenStream tokenStream, Version version) {
-            if (version.luceneVersion.onOrAfter(org.apache.lucene.util.Version.LUCENE_4_8)) {
-                return new WordDelimiterFilter(tokenStream,
-                        WordDelimiterFilter.GENERATE_WORD_PARTS |
-                        WordDelimiterFilter.GENERATE_NUMBER_PARTS |
-                        WordDelimiterFilter.SPLIT_ON_CASE_CHANGE |
-                        WordDelimiterFilter.SPLIT_ON_NUMERICS |
-                        WordDelimiterFilter.STEM_ENGLISH_POSSESSIVE, null);
-            } else {
-                return new Lucene47WordDelimiterFilter(tokenStream,
-                        WordDelimiterFilter.GENERATE_WORD_PARTS |
-                        WordDelimiterFilter.GENERATE_NUMBER_PARTS |
-                        WordDelimiterFilter.SPLIT_ON_CASE_CHANGE |
-                        WordDelimiterFilter.SPLIT_ON_NUMERICS |
-                        WordDelimiterFilter.STEM_ENGLISH_POSSESSIVE, null);
-            }
+            return new WordDelimiterFilter(tokenStream,
+                    WordDelimiterFilter.GENERATE_WORD_PARTS |
+                    WordDelimiterFilter.GENERATE_NUMBER_PARTS |
+                    WordDelimiterFilter.SPLIT_ON_CASE_CHANGE |
+                    WordDelimiterFilter.SPLIT_ON_NUMERICS |
+                    WordDelimiterFilter.STEM_ENGLISH_POSSESSIVE, null);
         }
     },

     STOP(CachingStrategy.LUCENE) {
         @Override
         public TokenStream create(TokenStream tokenStream, Version version) {
-            if (version.luceneVersion.onOrAfter(org.apache.lucene.util.Version.LUCENE_4_4_0)) {
-                return new StopFilter(tokenStream, StopAnalyzer.ENGLISH_STOP_WORDS_SET);
-            } else {
-                @SuppressWarnings("deprecation")
-                final TokenStream filter = new Lucene43StopFilter(true, tokenStream, StopAnalyzer.ENGLISH_STOP_WORDS_SET);
-                return filter;
-            }
+            return new StopFilter(tokenStream, StopAnalyzer.ENGLISH_STOP_WORDS_SET);
         }
     },

     TRIM(CachingStrategy.LUCENE) {
         @Override
         public TokenStream create(TokenStream tokenStream, Version version) {
-            if (version.luceneVersion.onOrAfter(org.apache.lucene.util.Version.LUCENE_4_4_0)) {
-                return new TrimFilter(tokenStream);
-            } else {
-                @SuppressWarnings("deprecation")
-                final TokenStream filter = new Lucene43TrimFilter(tokenStream, true);
-                return filter;
-            }
+            return new TrimFilter(tokenStream);
         }
     },
@@ -149,13 +120,7 @@ public enum PreBuiltTokenFilters {
     LENGTH(CachingStrategy.LUCENE) {
         @Override
         public TokenStream create(TokenStream tokenStream, Version version) {
-            if (version.luceneVersion.onOrAfter(org.apache.lucene.util.Version.LUCENE_4_4_0)) {
-                return new LengthFilter(tokenStream, 0, Integer.MAX_VALUE);
-            } else {
-                @SuppressWarnings("deprecation")
-                final TokenStream filter = new Lucene43LengthFilter(true, tokenStream, 0, Integer.MAX_VALUE);
-                return filter;
-            }
+            return new LengthFilter(tokenStream, 0, Integer.MAX_VALUE);
         }
     },
@@ -211,26 +176,14 @@ public enum PreBuiltTokenFilters {
     NGRAM(CachingStrategy.LUCENE) {
         @Override
         public TokenStream create(TokenStream tokenStream, Version version) {
-            if (version.luceneVersion.onOrAfter(org.apache.lucene.util.Version.LUCENE_4_4_0)) {
-                return new NGramTokenFilter(tokenStream);
-            } else {
-                @SuppressWarnings("deprecation")
-                final TokenStream filter = new Lucene43NGramTokenFilter(tokenStream);
-                return filter;
-            }
+            return new NGramTokenFilter(tokenStream);
         }
     },

     EDGE_NGRAM(CachingStrategy.LUCENE) {
         @Override
         public TokenStream create(TokenStream tokenStream, Version version) {
-            if (version.luceneVersion.onOrAfter(org.apache.lucene.util.Version.LUCENE_4_4_0)) {
-                return new EdgeNGramTokenFilter(tokenStream, EdgeNGramTokenFilter.DEFAULT_MIN_GRAM_SIZE, EdgeNGramTokenFilter.DEFAULT_MAX_GRAM_SIZE);
-            } else {
-                @SuppressWarnings("deprecation")
-                final TokenStream filter = new Lucene43EdgeNGramTokenFilter(tokenStream, EdgeNGramTokenFilter.DEFAULT_MIN_GRAM_SIZE, EdgeNGramTokenFilter.DEFAULT_MAX_GRAM_SIZE);
-                return filter;
-            }
+            return new EdgeNGramTokenFilter(tokenStream, EdgeNGramTokenFilter.DEFAULT_MIN_GRAM_SIZE, EdgeNGramTokenFilter.DEFAULT_MAX_GRAM_SIZE);
         }
     },

PreBuiltTokenizers.java

@@ -24,16 +24,12 @@ import org.apache.lucene.analysis.core.LetterTokenizer;
 import org.apache.lucene.analysis.core.LowerCaseTokenizer;
 import org.apache.lucene.analysis.core.WhitespaceTokenizer;
 import org.apache.lucene.analysis.ngram.EdgeNGramTokenizer;
-import org.apache.lucene.analysis.ngram.Lucene43EdgeNGramTokenizer;
-import org.apache.lucene.analysis.ngram.Lucene43NGramTokenizer;
 import org.apache.lucene.analysis.ngram.NGramTokenizer;
 import org.apache.lucene.analysis.path.PathHierarchyTokenizer;
 import org.apache.lucene.analysis.pattern.PatternTokenizer;
 import org.apache.lucene.analysis.standard.ClassicTokenizer;
 import org.apache.lucene.analysis.standard.StandardTokenizer;
 import org.apache.lucene.analysis.standard.UAX29URLEmailTokenizer;
-import org.apache.lucene.analysis.standard.std40.StandardTokenizer40;
-import org.apache.lucene.analysis.standard.std40.UAX29URLEmailTokenizer40;
 import org.apache.lucene.analysis.th.ThaiTokenizer;
 import org.elasticsearch.Version;
 import org.elasticsearch.common.regex.Regex;
@@ -50,11 +46,7 @@ public enum PreBuiltTokenizers {
     STANDARD(CachingStrategy.LUCENE) {
         @Override
         protected Tokenizer create(Version version) {
-            if (version.luceneVersion.onOrAfter(org.apache.lucene.util.Version.LUCENE_4_7_0)) {
-                return new StandardTokenizer();
-            } else {
-                return new StandardTokenizer40();
-            }
+            return new StandardTokenizer();
         }
     },
@@ -68,11 +60,7 @@ public enum PreBuiltTokenizers {
     UAX_URL_EMAIL(CachingStrategy.LUCENE) {
         @Override
         protected Tokenizer create(Version version) {
-            if (version.luceneVersion.onOrAfter(org.apache.lucene.util.Version.LUCENE_4_7_0)) {
-                return new UAX29URLEmailTokenizer();
-            } else {
-                return new UAX29URLEmailTokenizer40();
-            }
+            return new UAX29URLEmailTokenizer();
         }
     },
@@ -114,28 +102,14 @@ public enum PreBuiltTokenizers {
     NGRAM(CachingStrategy.LUCENE) {
         @Override
         protected Tokenizer create(Version version) {
-            // see NGramTokenizerFactory for an explanation of this logic:
-            // 4.4 patch was used before 4.4 was released
-            if (version.onOrAfter(org.elasticsearch.Version.V_0_90_2) &&
-                    version.luceneVersion.onOrAfter(org.apache.lucene.util.Version.LUCENE_4_3)) {
-                return new NGramTokenizer();
-            } else {
-                return new Lucene43NGramTokenizer();
-            }
+            return new NGramTokenizer();
         }
     },

     EDGE_NGRAM(CachingStrategy.LUCENE) {
         @Override
         protected Tokenizer create(Version version) {
-            // see EdgeNGramTokenizerFactory for an explanation of this logic:
-            // 4.4 patch was used before 4.4 was released
-            if (version.onOrAfter(org.elasticsearch.Version.V_0_90_2) &&
-                    version.luceneVersion.onOrAfter(org.apache.lucene.util.Version.LUCENE_4_3)) {
-                return new EdgeNGramTokenizer(EdgeNGramTokenizer.DEFAULT_MIN_GRAM_SIZE, EdgeNGramTokenizer.DEFAULT_MAX_GRAM_SIZE);
-            } else {
-                return new Lucene43EdgeNGramTokenizer(EdgeNGramTokenizer.DEFAULT_MIN_GRAM_SIZE, EdgeNGramTokenizer.DEFAULT_MAX_GRAM_SIZE);
-            }
+            return new EdgeNGramTokenizer(EdgeNGramTokenizer.DEFAULT_MIN_GRAM_SIZE, EdgeNGramTokenizer.DEFAULT_MAX_GRAM_SIZE);
         }
     },