diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/ngram/EdgeNGramFilterFactory.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/ngram/EdgeNGramFilterFactory.java
index bd7ca1f84e8..db6a22a7cdd 100644
--- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/ngram/EdgeNGramFilterFactory.java
+++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/ngram/EdgeNGramFilterFactory.java
@@ -41,8 +41,8 @@ public class EdgeNGramFilterFactory extends TokenFilterFactory {
   /** Creates a new EdgeNGramFilterFactory */
   public EdgeNGramFilterFactory(Map<String, String> args) {
     super(args);
-    minGramSize = getInt(args, "minGramSize", EdgeNGramTokenFilter.DEFAULT_MIN_GRAM_SIZE);
-    maxGramSize = getInt(args, "maxGramSize", EdgeNGramTokenFilter.DEFAULT_MAX_GRAM_SIZE);
+    minGramSize = requireInt(args, "minGramSize");
+    maxGramSize = requireInt(args, "maxGramSize");
     preserveOriginal = getBoolean(args, "preserveOriginal", EdgeNGramTokenFilter.DEFAULT_PRESERVE_ORIGINAL);
     if (!args.isEmpty()) {
       throw new IllegalArgumentException("Unknown parameters: " + args);
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/ngram/EdgeNGramTokenFilter.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/ngram/EdgeNGramTokenFilter.java
index 154f075e716..613f8a173ce 100644
--- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/ngram/EdgeNGramTokenFilter.java
+++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/ngram/EdgeNGramTokenFilter.java
@@ -32,16 +32,6 @@ import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
  * supplementary characters.
  */
 public final class EdgeNGramTokenFilter extends TokenFilter {
-  /**
-   * @deprecated since 7.4 - this value will be required.
-   */
-  @Deprecated
-  public static final int DEFAULT_MAX_GRAM_SIZE = 1;
-  /**
-   * @deprecated since 7.4 - this value will be required.
-   */
-  @Deprecated
-  public static final int DEFAULT_MIN_GRAM_SIZE = 1;
   public static final boolean DEFAULT_PRESERVE_ORIGINAL = false;
 
   private final int minGram;
@@ -98,27 +88,6 @@ public final class EdgeNGramTokenFilter extends TokenFilter {
     this(input, gramSize, gramSize, DEFAULT_PRESERVE_ORIGINAL);
   }
 
-  /**
-   * Creates an EdgeNGramTokenFilter that, for a given input term, produces all
-   * edge n-grams with lengths >= minGram and <= maxGram.
-   *
-   * <p>
-   * Behaves the same as
-   * {@link #EdgeNGramTokenFilter(TokenStream, int, int, boolean)
-   *        NGramTokenFilter(input, minGram, maxGram, false)}
-   *
-   * @param input {@link TokenStream} holding the input to be tokenized
-   * @param minGram the minimum length of the generated n-grams
-   * @param maxGram the maximum length of the generated n-grams
-   *
-   * @deprecated since 7.4. Use
-   * {@link #EdgeNGramTokenFilter(TokenStream, int, int, boolean)} instead.
-   */
-  @Deprecated
-  public EdgeNGramTokenFilter(TokenStream input, int minGram, int maxGram) {
-    this(input, minGram, maxGram, DEFAULT_PRESERVE_ORIGINAL);
-  }
-
   @Override
   public final boolean incrementToken() throws IOException {
     while (true) {
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/ngram/NGramFilterFactory.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/ngram/NGramFilterFactory.java
index 0a7e77dcb43..9a681dfaf3e 100644
--- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/ngram/NGramFilterFactory.java
+++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/ngram/NGramFilterFactory.java
@@ -41,8 +41,8 @@ public class NGramFilterFactory extends TokenFilterFactory {
   /** Creates a new NGramFilterFactory */
   public NGramFilterFactory(Map<String, String> args) {
     super(args);
-    minGramSize = getInt(args, "minGramSize", NGramTokenFilter.DEFAULT_MIN_NGRAM_SIZE);
-    maxGramSize = getInt(args, "maxGramSize", NGramTokenFilter.DEFAULT_MAX_NGRAM_SIZE);
+    minGramSize = requireInt(args, "minGramSize");
+    maxGramSize = requireInt(args, "maxGramSize");
     preserveOriginal = getBoolean(args, "keepShortTerm", NGramTokenFilter.DEFAULT_PRESERVE_ORIGINAL);
     if (!args.isEmpty()) {
       throw new IllegalArgumentException("Unknown parameters: " + args);
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/ngram/NGramTokenFilter.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/ngram/NGramTokenFilter.java
index 8e1a7e40936..5b6147b8ea0 100644
--- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/ngram/NGramTokenFilter.java
+++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/ngram/NGramTokenFilter.java
@@ -39,17 +39,6 @@ import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
  * override {@link NGramTokenizer#isTokenChar(int)} to perform pre-tokenization.
  */
 public final class NGramTokenFilter extends TokenFilter {
-  /**
-   * @deprecated since 7.4 - this value will be required.
-   */
-  @Deprecated
-  public static final int DEFAULT_MIN_NGRAM_SIZE = 1;
-
-  /**
-   * @deprecated since 7.4 - this value will be required.
-   */
-  @Deprecated
-  public static final int DEFAULT_MAX_NGRAM_SIZE = 2;
   public static final boolean DEFAULT_PRESERVE_ORIGINAL = false;
 
   private final int minGram;
@@ -106,44 +95,6 @@ public final class NGramTokenFilter extends TokenFilter {
     this(input, gramSize, gramSize, DEFAULT_PRESERVE_ORIGINAL);
   }
 
-  /**
-   * Creates an NGramTokenFilter that, for a given input term, produces all
-   * contained n-grams with lengths >= minGram and <= maxGram.
-   *
-   * <p>
-   * Behaves the same as
-   * {@link #NGramTokenFilter(TokenStream, int, int, boolean)
-   *        NGramTokenFilter(input, minGram, maxGram, false)}
-   *
-   * @param input {@link TokenStream} holding the input to be tokenized
-   * @param minGram the minimum length of the generated n-grams
-   * @param maxGram the maximum length of the generated n-grams
-   *
-   * @deprecated since 7.4. Use
-   * {@link #NGramTokenFilter(TokenStream, int, int, boolean)} instead.
-   */
-  @Deprecated
-  public NGramTokenFilter(TokenStream input, int minGram, int maxGram) {
-    this(input, minGram, maxGram, DEFAULT_PRESERVE_ORIGINAL);
-  }
-
-  /**
-   * Creates NGramTokenFilter with default min and max n-grams.
-   *
-   * <p>
-   * Behaves the same as
-   * {@link #NGramTokenFilter(TokenStream, int, int, boolean)
-   *        NGramTokenFilter(input, 1, 2, false)}
-   *
-   * @param input {@link TokenStream} holding the input to be tokenized
-   * @deprecated since 7.4. Use
-   * {@link #NGramTokenFilter(TokenStream, int, int, boolean)} instead.
-   */
-  @Deprecated
-  public NGramTokenFilter(TokenStream input) {
-    this(input, DEFAULT_MIN_NGRAM_SIZE, DEFAULT_MAX_NGRAM_SIZE, DEFAULT_PRESERVE_ORIGINAL);
-  }
-
   @Override
   public final boolean incrementToken() throws IOException {
     while (true) {
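Migration note (illustrative, not part of the patch): with the deprecated default constants gone, the factories now reject configurations that omit minGramSize or maxGramSize, and direct callers must pass the gram range and the preserveOriginal flag to the four-argument constructors that remain. A minimal sketch, assuming Lucene 7.4+ where those constructors already exist; the analyzer class and the whitespace tokenizer are placeholders only:

    import org.apache.lucene.analysis.Analyzer;
    import org.apache.lucene.analysis.TokenStream;
    import org.apache.lucene.analysis.Tokenizer;
    import org.apache.lucene.analysis.core.WhitespaceTokenizer;
    import org.apache.lucene.analysis.ngram.NGramTokenFilter;

    // Hypothetical analyzer showing the explicit arguments; 1 and 2 mirror the
    // removed DEFAULT_MIN_NGRAM_SIZE / DEFAULT_MAX_NGRAM_SIZE values.
    public class ExplicitNGramAnalyzer extends Analyzer {
      @Override
      protected TokenStreamComponents createComponents(String fieldName) {
        Tokenizer source = new WhitespaceTokenizer();
        // Before this patch, new NGramTokenFilter(source) relied on the deprecated defaults.
        TokenStream result = new NGramTokenFilter(source, 1, 2, false);
        return new TokenStreamComponents(source, result);
      }
    }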