upgrade to Lucene-8.7.0-snapshot-61ea26a (#61957) (#61974)

Ignacio Vera 2020-09-04 13:46:20 +02:00 committed by GitHub
parent bdccab7c7a
commit 31c026f25c
74 changed files with 122 additions and 1350 deletions

View File

@@ -1,5 +1,5 @@
 elasticsearch = 7.10.0
-lucene = 8.6.2
+lucene = 8.7.0-snapshot-61ea26a
 bundled_jdk_vendor = adoptopenjdk
 bundled_jdk = 14.0.1+7

View File

@@ -1,8 +1,8 @@
 include::{docs-root}/shared/versions/stack/{source_branch}.asciidoc[]
-:lucene_version: 8.6.2
-:lucene_version_path: 8_6_2
+:lucene_version: 8.7.0
+:lucene_version_path: 8.7.0
 :jdk: 1.8.0_131
 :jdk_major: 8
 :build_flavor: default

View File

@ -1 +0,0 @@
2f8451c08cd6d19ecacea5f35db2f94014a3a423

View File

@ -0,0 +1 @@
d952f8254b64ff63b1846233371511073736e77d

View File

@ -1 +0,0 @@
75b90e16b9f0773a5fde05715f07f5a0b8a66658

View File

@ -0,0 +1 @@
f69abfe7fcdb8a9c1a929f5e7292924bbd38557a

View File

@ -1 +0,0 @@
665b1e0480983dd2e2996996a104cf05dca783c9

View File

@ -0,0 +1 @@
005c312e96d1c9d47fd3bf0980c810c30c497500

View File

@ -1 +0,0 @@
f3f001555ee4719adf4f8ce4d2bfe3de4ebd1f35

View File

@ -0,0 +1 @@
b0fb357b731fa483f747e2107c2e01b75160af72

View File

@ -1 +0,0 @@
a3676e5a7fb6ed20d0d641f893e3e7b1bda824ea

View File

@ -0,0 +1 @@
689b5f0e3371d0709765c9ba7fc21042cf41754d

View File

@ -1 +0,0 @@
6213af5b0dc92962cf9adc05a14f04682b9bbf15

View File

@ -0,0 +1 @@
5b0a0c7a88076ab8ee21d0b3781fae8d8353da84

View File

@ -1 +0,0 @@
0cb86dd6a32c867802fe5604dbab6e0fd84aa9fc

View File

@ -0,0 +1 @@
8cbea549bf71ca41bfd6ed985d188ee74d6e9098

View File

@ -1 +0,0 @@
3e44b8d571e18a67abb9e3bc941cd1a7cc4203c8

View File

@ -0,0 +1 @@
05cccc203ace5a4af42c77fb32b235c94f8ceb5a

View File

@ -1 +0,0 @@
c4c58571911464138b7ad8881a4319cb0d0ce504

View File

@ -0,0 +1 @@
2191c2c337bbc483c1bce0d7b35f0e00bece50ab

View File

@ -1 +0,0 @@
3719b09652d0c9de87de529060d9c2ce527f929c

View File

@ -0,0 +1 @@
d9d677d9b01528b984c722dd30fb1ceca9f601b7

View File

@ -1 +0,0 @@
de97930c2f5242124e52579eb3d23722e13f0f9d

View File

@ -0,0 +1 @@
331c1607af5fea2251e8c3cd6a0bce04f3ec2bc6

View File

@ -1 +0,0 @@
d6fd5936f99ab50622e75ba6dacff97090cb62e1

View File

@ -0,0 +1 @@
d5fa5c12ccb43629d79591c0079b5648a200e3cb

View File

@ -1 +0,0 @@
275ce757580877efb9370452c282a1de4d10fd3e

View File

@ -0,0 +1 @@
3e24abaa4ad85106b22dbdbd61756bda369c6d04

View File

@ -1 +0,0 @@
8f7e21a00095e0ebed741e2ee425da8019b47b6b

View File

@ -0,0 +1 @@
13b6769b7a956bc4d1b4e162a45aea9a8064948c

View File

@ -1 +0,0 @@
75412c308ff3d9f2bf45248538b9806ce624cc3d

View File

@ -0,0 +1 @@
8f17e777d3782bbec1036770e40823a867a72586

View File

@ -1 +0,0 @@
1072a1d3b3c6ffa40116a88c267bfa8523ccc166

View File

@ -0,0 +1 @@
3ddfc214dc0ea4b39532f2a3800181d1142afa02

View File

@ -1 +0,0 @@
f1edd4d4645d7e8ba07de3302a272123f061cbb5

View File

@ -0,0 +1 @@
b1b5e237cdf3a37f78dfecacdd13373427c6c2d6

View File

@ -1 +0,0 @@
27c206ac7cd7ba7b3edb176c781718c99efb14e2

View File

@ -0,0 +1 @@
115b85c09a0c0d6bd4213d100fbe2ac2494ff5e1

View File

@ -1 +0,0 @@
82d432972fecbeb3713329eca9240fd2bb322bd5

View File

@ -0,0 +1 @@
88c87545785f724c7a9babcf58818ae68c287423

View File

@ -1 +0,0 @@
9b145bd48c34e4a478f1e090a8424a3974ee23f1

View File

@ -0,0 +1 @@
e43bf5d798b5b12c2f4f815df0c1b238d02d1795

View File

@ -1 +0,0 @@
e9b93e2f6c5cdb74332b6b9aada4a190f20d45c3

View File

@ -0,0 +1 @@
a957497d9bb5c0c842703bbaa452125f462de309

View File

@ -1 +0,0 @@
1ffe015e1f765ec0e8aaccd3b82d7578584aebbf

View File

@ -0,0 +1 @@
c8978f546b65eefe4647f3bddcfffe2c574d0c84

File diff suppressed because it is too large

View File

@ -1,143 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.search;
import org.apache.lucene.index.Term;
import org.apache.lucene.util.automaton.Automaton;
import org.apache.lucene.util.automaton.AutomatonProvider;
import org.apache.lucene.util.automaton.Operations;
/**
* Copy of Lucene's RegExpQuery class coming in 8.7 with case
* insensitive search option
* @deprecated
*/
@Deprecated
public class RegexpQuery87 extends AutomatonQuery {
/**
* A provider that provides no named automata
*/
private static AutomatonProvider defaultProvider = new AutomatonProvider() {
@Override
public Automaton getAutomaton(String name) {
return null;
}
};
/**
* Constructs a query for terms matching <code>term</code>.
* <p>
* By default, all regular expression features are enabled.
* </p>
*
* @param term regular expression.
*/
public RegexpQuery87(Term term) {
this(term, RegExp87.ALL);
}
/**
* Constructs a query for terms matching <code>term</code>.
*
* @param term regular expression.
* @param flags optional RegExp features from {@link RegExp87}
*/
public RegexpQuery87(Term term, int flags) {
this(term, flags, defaultProvider,
Operations.DEFAULT_MAX_DETERMINIZED_STATES);
}
/**
* Constructs a query for terms matching <code>term</code>.
*
* @param term regular expression.
* @param flags optional RegExp syntax features from {@link RegExp87}
* @param maxDeterminizedStates maximum number of states that compiling the
* automaton for the regexp can result in. Set higher to allow more complex
* queries and lower to prevent memory exhaustion.
*/
public RegexpQuery87(Term term, int flags, int maxDeterminizedStates) {
this(term, flags, defaultProvider, maxDeterminizedStates);
}
/**
* Constructs a query for terms matching <code>term</code>.
*
* @param term regular expression.
* @param syntax_flags optional RegExp syntax features from {@link RegExp87}
* @param match_flags boolean 'or' of match behavior options such as case insensitivity
* @param maxDeterminizedStates maximum number of states that compiling the
* automaton for the regexp can result in. Set higher to allow more complex
* queries and lower to prevent memory exhaustion.
*/
public RegexpQuery87(Term term, int syntax_flags, int match_flags, int maxDeterminizedStates) {
this(term, syntax_flags, match_flags, defaultProvider, maxDeterminizedStates);
}
/**
* Constructs a query for terms matching <code>term</code>.
*
* @param term regular expression.
* @param syntax_flags optional RegExp features from {@link RegExp87}
* @param provider custom AutomatonProvider for named automata
* @param maxDeterminizedStates maximum number of states that compiling the
* automaton for the regexp can result in. Set higher to allow more complex
* queries and lower to prevent memory exhaustion.
*/
public RegexpQuery87(Term term, int syntax_flags, AutomatonProvider provider,
int maxDeterminizedStates) {
this(term, syntax_flags, 0, provider, maxDeterminizedStates);
}
/**
* Constructs a query for terms matching <code>term</code>.
*
* @param term regular expression.
* @param syntax_flags optional RegExp features from {@link RegExp87}
* @param match_flags boolean 'or' of match behavior options such as case insensitivity
* @param provider custom AutomatonProvider for named automata
* @param maxDeterminizedStates maximum number of states that compiling the
* automaton for the regexp can result in. Set higher to allow more complex
* queries and lower to prevent memory exhaustion.
*/
public RegexpQuery87(Term term, int syntax_flags, int match_flags, AutomatonProvider provider,
int maxDeterminizedStates) {
super(term,
new RegExp87(term.text(), syntax_flags, match_flags).toAutomaton(
provider, maxDeterminizedStates), maxDeterminizedStates);
}
/** Returns the regexp of this query wrapped in a Term. */
public Term getRegexp() {
return term;
}
/** Prints a user-readable version of this query. */
@Override
public String toString(String field) {
StringBuilder buffer = new StringBuilder();
if (!term.field().equals(field)) {
buffer.append(term.field());
buffer.append(":");
}
buffer.append('/');
buffer.append(term.text());
buffer.append('/');
return buffer.toString();
}
}

View File

@@ -145,7 +145,7 @@ public class Version implements Comparable<Version>, ToXContentFragment {
 public static final Version V_7_9_0 = new Version(7090099, org.apache.lucene.util.Version.LUCENE_8_6_0);
 public static final Version V_7_9_1 = new Version(7090199, org.apache.lucene.util.Version.LUCENE_8_6_2);
 public static final Version V_7_9_2 = new Version(7090299, org.apache.lucene.util.Version.LUCENE_8_6_2);
-public static final Version V_7_10_0 = new Version(7100099, org.apache.lucene.util.Version.LUCENE_8_6_2);
+public static final Version V_7_10_0 = new Version(7100099, org.apache.lucene.util.Version.LUCENE_8_7_0);
 public static final Version CURRENT = V_7_10_0;
 private static final ImmutableOpenIntMap<Version> idToVersion;

View File

@@ -104,7 +104,7 @@ import java.util.List;
 import java.util.Map;
 public class Lucene {
-public static final String LATEST_CODEC = "Lucene86";
+public static final String LATEST_CODEC = "Lucene87";
 public static final String SOFT_DELETES_FIELD = "__soft_deletes";

View File

@@ -21,8 +21,8 @@ package org.elasticsearch.index.codec;
 import org.apache.logging.log4j.Logger;
 import org.apache.lucene.codecs.Codec;
-import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat.Mode;
-import org.apache.lucene.codecs.lucene86.Lucene86Codec;
+import org.apache.lucene.codecs.lucene87.Lucene87Codec;
+import org.apache.lucene.codecs.lucene87.Lucene87StoredFieldsFormat.Mode;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.collect.MapBuilder;
 import org.elasticsearch.index.mapper.MapperService;
@@ -47,8 +47,8 @@ public class CodecService {
 public CodecService(@Nullable MapperService mapperService, Logger logger) {
 final MapBuilder<String, Codec> codecs = MapBuilder.<String, Codec>newMapBuilder();
 if (mapperService == null) {
-codecs.put(DEFAULT_CODEC, new Lucene86Codec());
-codecs.put(BEST_COMPRESSION_CODEC, new Lucene86Codec(Mode.BEST_COMPRESSION));
+codecs.put(DEFAULT_CODEC, new Lucene87Codec());
+codecs.put(BEST_COMPRESSION_CODEC, new Lucene87Codec(Mode.BEST_COMPRESSION));
 } else {
 codecs.put(DEFAULT_CODEC,
 new PerFieldMappingPostingFormatCodec(Mode.BEST_SPEED, mapperService, logger));
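For reference, a minimal sketch of indexing with the Lucene87Codec that CodecService now registers (the directory, field name, and document contents are illustrative assumptions, not taken from this change):

import org.apache.lucene.codecs.lucene87.Lucene87Codec;
import org.apache.lucene.codecs.lucene87.Lucene87StoredFieldsFormat;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;

public class Lucene87CodecSketch {
    public static void main(String[] args) throws Exception {
        try (Directory dir = new ByteBuffersDirectory()) {
            IndexWriterConfig config = new IndexWriterConfig();
            // Same codec the BEST_COMPRESSION_CODEC entry above registers.
            config.setCodec(new Lucene87Codec(Lucene87StoredFieldsFormat.Mode.BEST_COMPRESSION));
            try (IndexWriter writer = new IndexWriter(dir, config)) {
                Document doc = new Document();
                doc.add(new StoredField("body", "stored fields written with the Lucene87 format"));
                writer.addDocument(doc);
                writer.commit();
            }
        }
    }
}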

View File

@@ -22,8 +22,8 @@ package org.elasticsearch.index.codec;
 import org.apache.logging.log4j.Logger;
 import org.apache.lucene.codecs.Codec;
 import org.apache.lucene.codecs.PostingsFormat;
-import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat;
-import org.apache.lucene.codecs.lucene86.Lucene86Codec;
+import org.apache.lucene.codecs.lucene87.Lucene87StoredFieldsFormat;
+import org.apache.lucene.codecs.lucene87.Lucene87Codec;
 import org.elasticsearch.common.lucene.Lucene;
 import org.elasticsearch.index.mapper.CompletionFieldMapper;
 import org.elasticsearch.index.mapper.MappedFieldType;
@@ -37,7 +37,7 @@ import org.elasticsearch.index.mapper.MapperService;
 * per index in real time via the mapping API. If no specific postings format is
 * configured for a specific field the default postings format is used.
 */
-public class PerFieldMappingPostingFormatCodec extends Lucene86Codec {
+public class PerFieldMappingPostingFormatCodec extends Lucene87Codec {
 private final Logger logger;
 private final MapperService mapperService;
@@ -46,7 +46,7 @@ public class PerFieldMappingPostingFormatCodec extends Lucene86Codec {
 "PerFieldMappingPostingFormatCodec must subclass the latest " + "lucene codec: " + Lucene.LATEST_CODEC;
 }
-public PerFieldMappingPostingFormatCodec(Lucene50StoredFieldsFormat.Mode compressionMode, MapperService mapperService, Logger logger) {
+public PerFieldMappingPostingFormatCodec(Lucene87StoredFieldsFormat.Mode compressionMode, MapperService mapperService, Logger logger) {
 super(compressionMode);
 this.mapperService = mapperService;
 this.logger = logger;

View File

@@ -25,7 +25,7 @@ import org.apache.lucene.search.FuzzyQuery;
 import org.apache.lucene.search.MultiTermQuery;
 import org.apache.lucene.search.PrefixQuery;
 import org.apache.lucene.search.Query;
-import org.apache.lucene.search.RegexpQuery87;
+import org.apache.lucene.search.RegexpQuery;
 import org.apache.lucene.search.TermRangeQuery;
 import org.apache.lucene.search.WildcardQuery;
 import org.apache.lucene.util.BytesRef;
@@ -141,7 +141,7 @@ public abstract class StringFieldType extends TermBasedFieldType {
 ALLOW_EXPENSIVE_QUERIES.getKey() + "' is set to false.");
 }
 failIfNotIndexed();
-RegexpQuery87 query = new RegexpQuery87(new Term(name(), indexedValueForSearch(value)), syntaxFlags,
+RegexpQuery query = new RegexpQuery(new Term(name(), indexedValueForSearch(value)), syntaxFlags,
 matchFlags, maxDeterminizedStates);
 if (method != null) {
 query.setRewriteMethod(method);

View File

@@ -18,7 +18,7 @@
 */
 package org.elasticsearch.index.query;
-import org.apache.lucene.search.RegExp87;
+import org.apache.lucene.util.automaton.RegExp;
 import org.elasticsearch.common.Strings;
 import java.util.Locale;
@@ -43,37 +43,37 @@ public enum RegexpFlag {
 /**
 * Enables intersection of the form: {@code &lt;expression&gt; &amp; &lt;expression&gt;}
 */
-INTERSECTION(RegExp87.INTERSECTION),
+INTERSECTION(RegExp.INTERSECTION),
 /**
 * Enables complement expression of the form: {@code ~&lt;expression&gt;}
 */
-COMPLEMENT(RegExp87.COMPLEMENT),
+COMPLEMENT(RegExp.COMPLEMENT),
 /**
 * Enables empty language expression: {@code #}
 */
-EMPTY(RegExp87.EMPTY),
+EMPTY(RegExp.EMPTY),
 /**
 * Enables any string expression: {@code @}
 */
-ANYSTRING(RegExp87.ANYSTRING),
+ANYSTRING(RegExp.ANYSTRING),
 /**
 * Enables numerical interval expression: {@code &lt;n-m&gt;}
 */
-INTERVAL(RegExp87.INTERVAL),
+INTERVAL(RegExp.INTERVAL),
 /**
 * Disables all available option flags
 */
-NONE(RegExp87.NONE),
+NONE(RegExp.NONE),
 /**
 * Enables all available option flags
 */
-ALL(RegExp87.ALL);
+ALL(RegExp.ALL);
 final int value;
@@ -110,9 +110,9 @@ public enum RegexpFlag {
 */
 public static int resolveValue(String flags) {
 if (flags == null || flags.isEmpty()) {
-return RegExp87.ALL;
+return RegExp.ALL;
 }
-int magic = RegExp87.NONE;
+int magic = RegExp.NONE;
 for (String s : Strings.delimitedListToStringArray(flags, "|")) {
 if (s.isEmpty()) {
 continue;

View File

@@ -22,9 +22,9 @@ package org.elasticsearch.index.query;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.search.MultiTermQuery;
 import org.apache.lucene.search.Query;
-import org.apache.lucene.search.RegExp87;
-import org.apache.lucene.search.RegexpQuery87;
+import org.apache.lucene.search.RegexpQuery;
 import org.apache.lucene.util.automaton.Operations;
+import org.apache.lucene.util.automaton.RegExp;
 import org.elasticsearch.Version;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.ParsingException;
@@ -288,17 +288,17 @@ public class RegexpQueryBuilder extends AbstractQueryBuilder<RegexpQueryBuilder>
 }
 MultiTermQuery.RewriteMethod method = QueryParsers.parseRewriteMethod(rewrite, null, LoggingDeprecationHandler.INSTANCE);
-int matchFlagsValue = caseInsensitive ? RegExp87.ASCII_CASE_INSENSITIVE : 0;
+int matchFlagsValue = caseInsensitive ? RegExp.ASCII_CASE_INSENSITIVE : 0;
 Query query = null;
 // For BWC we mask irrelevant bits (RegExp changed ALL from 0xffff to 0xff)
-int sanitisedSyntaxFlag = syntaxFlagsValue & RegExp87.ALL;
+int sanitisedSyntaxFlag = syntaxFlagsValue & RegExp.ALL;
 MappedFieldType fieldType = context.fieldMapper(fieldName);
 if (fieldType != null) {
 query = fieldType.regexpQuery(value, sanitisedSyntaxFlag, matchFlagsValue, maxDeterminizedStates, method, context);
 }
 if (query == null) {
-RegexpQuery87 regexpQuery = new RegexpQuery87(new Term(fieldName, BytesRefs.toBytesRef(value)), sanitisedSyntaxFlag,
+RegexpQuery regexpQuery = new RegexpQuery(new Term(fieldName, BytesRefs.toBytesRef(value)), sanitisedSyntaxFlag,
 matchFlagsValue, maxDeterminizedStates);
 if (method != null) {
 regexpQuery.setRewriteMethod(method);
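For reference, a minimal sketch of the native case-insensitive regexp support this change switches to, using only the constructor and flags exercised above (the field name and pattern are illustrative assumptions):

import org.apache.lucene.index.Term;
import org.apache.lucene.search.RegexpQuery;
import org.apache.lucene.util.automaton.Operations;
import org.apache.lucene.util.automaton.RegExp;

public class CaseInsensitiveRegexpSketch {
    public static void main(String[] args) {
        // Mirrors the builder logic above: syntax flags are masked with RegExp.ALL and
        // case insensitivity is passed separately as a match flag.
        int syntaxFlags = RegExp.ALL;
        int matchFlags = RegExp.ASCII_CASE_INSENSITIVE;
        RegexpQuery query = new RegexpQuery(new Term("field", "fo.*"),
                syntaxFlags, matchFlags, Operations.DEFAULT_MAX_DETERMINIZED_STATES);
        System.out.println(query); // prints something like field:/fo.*/
    }
}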

View File

@@ -37,13 +37,13 @@ import org.apache.lucene.search.MultiPhraseQuery;
 import org.apache.lucene.search.MultiTermQuery;
 import org.apache.lucene.search.PhraseQuery;
 import org.apache.lucene.search.Query;
-import org.apache.lucene.search.RegExp87;
 import org.apache.lucene.search.SynonymQuery;
 import org.apache.lucene.search.WildcardQuery;
 import org.apache.lucene.search.spans.SpanNearQuery;
 import org.apache.lucene.search.spans.SpanOrQuery;
 import org.apache.lucene.search.spans.SpanQuery;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.automaton.RegExp;
 import org.elasticsearch.common.lucene.search.Queries;
 import org.elasticsearch.common.regex.Regex;
 import org.elasticsearch.common.unit.Fuzziness;
@@ -731,7 +731,7 @@ public class QueryStringQueryParser extends XQueryParser {
 setAnalyzer(forceAnalyzer);
 return super.getRegexpQuery(field, termStr);
 }
-return currentFieldType.regexpQuery(termStr, RegExp87.ALL, 0, getMaxDeterminizedStates(),
+return currentFieldType.regexpQuery(termStr, RegExp.ALL, 0, getMaxDeterminizedStates(),
 getMultiTermRewriteMethod(), context);
 } catch (RuntimeException e) {
 if (lenient) {

View File

@@ -19,15 +19,14 @@
 package org.elasticsearch.search.suggest.completion;
-import org.apache.lucene.search.RegExp87;
 import org.apache.lucene.util.automaton.Operations;
+import org.apache.lucene.util.automaton.RegExp;
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.xcontent.ObjectParser;
-import org.elasticsearch.common.xcontent.ToXContent.Params;
 import org.elasticsearch.common.xcontent.ToXContentFragment;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
@@ -143,7 +142,7 @@ public class RegexOptions implements ToXContentFragment, Writeable {
 * Options for regular expression queries
 */
 public static class Builder {
-private int flagsValue = RegExp87.ALL;
+private int flagsValue = RegExp.ALL;
 private int maxDeterminizedStates = Operations.DEFAULT_MAX_DETERMINIZED_STATES;
 public Builder() {

View File

@@ -21,9 +21,9 @@ package org.elasticsearch.index.codec;
 import org.apache.logging.log4j.LogManager;
 import org.apache.lucene.codecs.Codec;
-import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat;
-import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat.Mode;
-import org.apache.lucene.codecs.lucene86.Lucene86Codec;
+import org.apache.lucene.codecs.lucene87.Lucene87Codec;
+import org.apache.lucene.codecs.lucene87.Lucene87StoredFieldsFormat;
+import org.apache.lucene.codecs.lucene87.Lucene87StoredFieldsFormat.Mode;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.IndexWriter;
@@ -53,8 +53,8 @@ public class CodecTests extends ESTestCase {
 public void testResolveDefaultCodecs() throws Exception {
 CodecService codecService = createCodecService();
 assertThat(codecService.codec("default"), instanceOf(PerFieldMappingPostingFormatCodec.class));
-assertThat(codecService.codec("default"), instanceOf(Lucene86Codec.class));
-assertThat(codecService.codec("Lucene86"), instanceOf(Lucene86Codec.class));
+assertThat(codecService.codec("default"), instanceOf(Lucene87Codec.class));
+assertThat(codecService.codec("Lucene87"), instanceOf(Lucene87Codec.class));
 }
 public void testDefault() throws Exception {
@@ -78,7 +78,7 @@ public class CodecTests extends ESTestCase {
 iw.close();
 DirectoryReader ir = DirectoryReader.open(dir);
 SegmentReader sr = (SegmentReader) ir.leaves().get(0).reader();
-String v = sr.getSegmentInfo().info.getAttribute(Lucene50StoredFieldsFormat.MODE_KEY);
+String v = sr.getSegmentInfo().info.getAttribute(Lucene87StoredFieldsFormat.MODE_KEY);
 assertNotNull(v);
 assertEquals(expected, Mode.valueOf(v));
 ir.close();

View File

@@ -19,7 +19,7 @@
 package org.elasticsearch.index.engine;
 import org.apache.lucene.codecs.PostingsFormat;
-import org.apache.lucene.codecs.lucene86.Lucene86Codec;
+import org.apache.lucene.codecs.lucene87.Lucene87Codec;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.IndexWriter;
@@ -57,7 +57,7 @@ public class CompletionStatsCacheTests extends ESTestCase {
 public void testCompletionStatsCache() throws IOException, InterruptedException {
 final IndexWriterConfig indexWriterConfig = newIndexWriterConfig();
 final PostingsFormat postingsFormat = new Completion84PostingsFormat();
-indexWriterConfig.setCodec(new Lucene86Codec() {
+indexWriterConfig.setCodec(new Lucene87Codec() {
 @Override
 public PostingsFormat getPostingsFormatForField(String field) {
 return postingsFormat; // all fields are suggest fields

View File

@@ -22,7 +22,6 @@ import org.apache.lucene.analysis.standard.StandardAnalyzer;
 import org.apache.lucene.document.SortedSetDocValuesField;
 import org.apache.lucene.index.IndexableField;
 import org.apache.lucene.search.Query;
-import org.apache.lucene.search.RegExp87;
 import org.apache.lucene.search.suggest.document.CompletionAnalyzer;
 import org.apache.lucene.search.suggest.document.ContextSuggestField;
 import org.apache.lucene.search.suggest.document.FuzzyCompletionQuery;
@@ -32,6 +31,7 @@ import org.apache.lucene.search.suggest.document.SuggestField;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.CharsRefBuilder;
 import org.apache.lucene.util.automaton.Operations;
+import org.apache.lucene.util.automaton.RegExp;
 import org.elasticsearch.Version;
 import org.elasticsearch.cluster.metadata.IndexMetadata;
 import org.elasticsearch.common.Strings;
@@ -889,7 +889,7 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
 Mapper fieldMapper = defaultMapper.mappers().getMapper("completion");
 CompletionFieldMapper completionFieldMapper = (CompletionFieldMapper) fieldMapper;
 Query prefixQuery = completionFieldMapper.fieldType()
-.regexpQuery(new BytesRef("co"), RegExp87.ALL, Operations.DEFAULT_MAX_DETERMINIZED_STATES);
+.regexpQuery(new BytesRef("co"), RegExp.ALL, Operations.DEFAULT_MAX_DETERMINIZED_STATES);
 assertThat(prefixQuery, instanceOf(RegexCompletionQuery.class));
 }

View File

@@ -22,7 +22,7 @@ package org.elasticsearch.index.mapper;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.search.PrefixQuery;
 import org.apache.lucene.search.Query;
-import org.apache.lucene.search.RegexpQuery87;
+import org.apache.lucene.search.RegexpQuery;
 import org.apache.lucene.search.WildcardQuery;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.ElasticsearchException;
@@ -44,7 +44,7 @@ public class IgnoredFieldTypeTests extends FieldTypeTestCase {
 public void testRegexpQuery() {
 MappedFieldType ft = IgnoredFieldMapper.IgnoredFieldType.INSTANCE;
-Query expected = new RegexpQuery87(new Term("_ignored", new BytesRef("foo?")));
+Query expected = new RegexpQuery(new Term("_ignored", new BytesRef("foo?")));
 assertEquals(expected, ft.regexpQuery("foo?", 0, 0, 10, null, MOCK_QSC));
 ElasticsearchException ee = expectThrows(ElasticsearchException.class,

View File

@@ -29,7 +29,7 @@ import org.apache.lucene.index.Term;
 import org.apache.lucene.search.DocValuesFieldExistsQuery;
 import org.apache.lucene.search.FuzzyQuery;
 import org.apache.lucene.search.NormsFieldExistsQuery;
-import org.apache.lucene.search.RegexpQuery87;
+import org.apache.lucene.search.RegexpQuery;
 import org.apache.lucene.search.TermInSetQuery;
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.search.TermRangeQuery;
@@ -128,7 +128,7 @@ public class KeywordFieldTypeTests extends FieldTypeTestCase {
 public void testRegexpQuery() {
 MappedFieldType ft = new KeywordFieldType("field");
-assertEquals(new RegexpQuery87(new Term("field","foo.*")),
+assertEquals(new RegexpQuery(new Term("field","foo.*")),
 ft.regexpQuery("foo.*", 0, 0, 10, null, MOCK_QSC));
 MappedFieldType unsearchable = new KeywordFieldType("field", false, true, Collections.emptyMap());

View File

@@ -21,7 +21,7 @@ package org.elasticsearch.index.mapper;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.search.PrefixQuery;
 import org.apache.lucene.search.Query;
-import org.apache.lucene.search.RegexpQuery87;
+import org.apache.lucene.search.RegexpQuery;
 import org.apache.lucene.search.WildcardQuery;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.ElasticsearchException;
@@ -43,7 +43,7 @@ public class RoutingFieldTypeTests extends FieldTypeTestCase {
 public void testRegexpQuery() {
 MappedFieldType ft = RoutingFieldMapper.RoutingFieldType.INSTANCE;
-Query expected = new RegexpQuery87(new Term("_routing", new BytesRef("foo?")));
+Query expected = new RegexpQuery(new Term("_routing", new BytesRef("foo?")));
 assertEquals(expected, ft.regexpQuery("foo?", 0, 0, 10, null, MOCK_QSC));
 ElasticsearchException ee = expectThrows(ElasticsearchException.class,

View File

@@ -26,7 +26,7 @@ import org.apache.lucene.search.ConstantScoreQuery;
 import org.apache.lucene.search.FuzzyQuery;
 import org.apache.lucene.search.PrefixQuery;
 import org.apache.lucene.search.Query;
-import org.apache.lucene.search.RegexpQuery87;
+import org.apache.lucene.search.RegexpQuery;
 import org.apache.lucene.search.TermInSetQuery;
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.search.TermRangeQuery;
@@ -86,7 +86,7 @@ public class TextFieldTypeTests extends FieldTypeTestCase {
 public void testRegexpQuery() {
 MappedFieldType ft = new TextFieldType("field");
-assertEquals(new RegexpQuery87(new Term("field","foo.*")),
+assertEquals(new RegexpQuery(new Term("field","foo.*")),
 ft.regexpQuery("foo.*", 0, 0, 10, null, MOCK_QSC));
 MappedFieldType unsearchable = new TextFieldType("field", false, Collections.emptyMap());

View File

@@ -40,7 +40,7 @@ import org.apache.lucene.search.NormsFieldExistsQuery;
 import org.apache.lucene.search.PhraseQuery;
 import org.apache.lucene.search.PrefixQuery;
 import org.apache.lucene.search.Query;
-import org.apache.lucene.search.RegexpQuery87;
+import org.apache.lucene.search.RegexpQuery;
 import org.apache.lucene.search.SynonymQuery;
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.search.TermRangeQuery;
@@ -733,8 +733,8 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase<QueryStr
 Query query = queryStringQuery("/foo*bar/").defaultField(TEXT_FIELD_NAME)
 .maxDeterminizedStates(5000)
 .toQuery(createShardContext());
-assertThat(query, instanceOf(RegexpQuery87.class));
-RegexpQuery87 regexpQuery = (RegexpQuery87) query;
+assertThat(query, instanceOf(RegexpQuery.class));
+RegexpQuery regexpQuery = (RegexpQuery) query;
 assertTrue(regexpQuery.toString().contains("/foo*bar/"));
 }

View File

@@ -20,9 +20,7 @@
 package org.elasticsearch.index.query;
 import org.apache.lucene.search.Query;
-import org.apache.lucene.search.RegExp87;
-import org.apache.lucene.search.RegexpQuery87;
-import org.apache.lucene.util.automaton.RegExp;
+import org.apache.lucene.search.RegexpQuery;
 import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.test.AbstractQueryTestCase;
@@ -82,8 +80,8 @@ public class RegexpQueryBuilderTests extends AbstractQueryTestCase<RegexpQueryBu
 @Override
 protected void doAssertLuceneQuery(RegexpQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
-assertThat(query, instanceOf(RegexpQuery87.class));
-RegexpQuery87 regexpQuery = (RegexpQuery87) query;
+assertThat(query, instanceOf(RegexpQuery.class));
+RegexpQuery regexpQuery = (RegexpQuery) query;
 String expectedFieldName = expectedFieldName( queryBuilder.fieldName());
 assertThat(regexpQuery.getField(), equalTo(expectedFieldName));
@@ -167,11 +165,4 @@ public class RegexpQueryBuilderTests extends AbstractQueryTestCase<RegexpQueryBu
 ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(json));
 assertEquals("[regexp] query does not support [case_insensitive] = false", e.getMessage());
 }
-public void testDeadCode() {
-assertTrue(RegExp87.class + " should be replaced with 8.7's "+RegExp.class,
-org.apache.lucene.util.Version.LATEST.major == 8 && org.apache.lucene.util.Version.LATEST.minor < 7);
-}
 }

View File

@@ -61,14 +61,13 @@ import java.util.concurrent.atomic.AtomicInteger;
 import java.util.function.Supplier;
 import static org.apache.lucene.codecs.compressing.CompressingStoredFieldsWriter.FIELDS_EXTENSION;
-import static org.apache.lucene.codecs.compressing.CompressingStoredFieldsWriter.INDEX_EXTENSION_PREFIX;
-import static org.apache.lucene.codecs.compressing.FieldsIndexWriter.FIELDS_INDEX_EXTENSION_SUFFIX;
-import static org.apache.lucene.codecs.compressing.FieldsIndexWriter.FIELDS_META_EXTENSION_SUFFIX;
+import static org.apache.lucene.codecs.compressing.CompressingStoredFieldsWriter.INDEX_EXTENSION;
+import static org.apache.lucene.codecs.compressing.CompressingStoredFieldsWriter.META_EXTENSION;
 public class SourceOnlySnapshot {
-private static final String FIELDS_INDEX_EXTENSION = INDEX_EXTENSION_PREFIX + FIELDS_INDEX_EXTENSION_SUFFIX;
-private static final String FIELDS_META_EXTENSION = INDEX_EXTENSION_PREFIX + FIELDS_META_EXTENSION_SUFFIX;
+private static final String FIELDS_INDEX_EXTENSION = INDEX_EXTENSION;
+private static final String FIELDS_META_EXTENSION = META_EXTENSION;
 private final LinkedFilesDirectory targetDirectory;
 private final Supplier<Query> deleteByQuerySupplier;

View File

@@ -13,12 +13,12 @@ import org.apache.lucene.search.MatchAllDocsQuery;
 import org.apache.lucene.search.MatchNoDocsQuery;
 import org.apache.lucene.search.MultiTermQuery;
 import org.apache.lucene.search.Query;
-import org.apache.lucene.search.RegExp87;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.UnicodeUtil;
 import org.apache.lucene.util.automaton.Automaton;
 import org.apache.lucene.util.automaton.CharacterRunAutomaton;
 import org.apache.lucene.util.automaton.LevenshteinAutomata;
+import org.apache.lucene.util.automaton.RegExp;
 import org.elasticsearch.common.geo.ShapeRelation;
 import org.elasticsearch.common.lucene.BytesRefs;
 import org.elasticsearch.common.regex.Regex;
@@ -216,7 +216,7 @@ public class ConstantKeywordFieldMapper extends FieldMapper {
 return new MatchNoDocsQuery();
 }
-final Automaton automaton = new RegExp87(value, syntaxFlags, matchFlags).toAutomaton(maxDeterminizedStates);
+final Automaton automaton = new RegExp(value, syntaxFlags, matchFlags).toAutomaton(maxDeterminizedStates);
 final CharacterRunAutomaton runAutomaton = new CharacterRunAutomaton(automaton);
 if (runAutomaton.run(this.value)) {
 return new MatchAllDocsQuery();
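For reference, a minimal sketch of the compile-once-and-test approach used by the constant_keyword mapper above (the constant value and pattern are illustrative assumptions):

import org.apache.lucene.util.automaton.Automaton;
import org.apache.lucene.util.automaton.CharacterRunAutomaton;
import org.apache.lucene.util.automaton.Operations;
import org.apache.lucene.util.automaton.RegExp;

public class ConstantValueRegexSketch {
    public static void main(String[] args) {
        // Same idea as the mapper code above: compile the regexp to an automaton once,
        // run it against the single constant value, and rewrite to match-all or match-none.
        String constantValue = "foo";
        Automaton automaton = new RegExp("f.o", RegExp.ALL, 0)
                .toAutomaton(Operations.DEFAULT_MAX_DETERMINIZED_STATES);
        CharacterRunAutomaton runAutomaton = new CharacterRunAutomaton(automaton);
        System.out.println(runAutomaton.run(constantValue)); // true -> MatchAllDocsQuery
    }
}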

View File

@@ -8,7 +8,7 @@ package org.elasticsearch.xpack.constantkeyword.mapper;
 import org.apache.lucene.search.MatchAllDocsQuery;
 import org.apache.lucene.search.MatchNoDocsQuery;
-import org.apache.lucene.search.RegExp87;
+import org.apache.lucene.util.automaton.RegExp;
 import org.elasticsearch.common.unit.Fuzziness;
 import org.elasticsearch.index.mapper.FieldTypeTestCase;
 import org.elasticsearch.xpack.constantkeyword.mapper.ConstantKeywordFieldMapper.ConstantKeywordFieldType;
@@ -86,9 +86,9 @@ public class ConstantKeywordFieldTypeTests extends FieldTypeTestCase {
 public void testRegexpQuery() {
 ConstantKeywordFieldType none = new ConstantKeywordFieldType("f", null);
-assertEquals(new MatchNoDocsQuery(), none.regexpQuery("f..o", RegExp87.ALL, 0, 10, null, null));
+assertEquals(new MatchNoDocsQuery(), none.regexpQuery("f..o", RegExp.ALL, 0, 10, null, null));
 ConstantKeywordFieldType ft = new ConstantKeywordFieldType("f", "foo");
-assertEquals(new MatchAllDocsQuery(), ft.regexpQuery("f.o", RegExp87.ALL, 0, 10, null, null));
-assertEquals(new MatchNoDocsQuery(), ft.regexpQuery("f..o", RegExp87.ALL, 0, 10, null, null));
+assertEquals(new MatchAllDocsQuery(), ft.regexpQuery("f.o", RegExp.ALL, 0, 10, null, null));
+assertEquals(new MatchNoDocsQuery(), ft.regexpQuery("f..o", RegExp.ALL, 0, 10, null, null));
 }
 }

View File

@@ -10,7 +10,7 @@ import org.apache.lucene.index.Term;
 import org.apache.lucene.search.DocValuesFieldExistsQuery;
 import org.apache.lucene.search.FuzzyQuery;
 import org.apache.lucene.search.Query;
-import org.apache.lucene.search.RegexpQuery87;
+import org.apache.lucene.search.RegexpQuery;
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.search.TermRangeQuery;
 import org.apache.lucene.search.WildcardQuery;
@@ -96,7 +96,7 @@ public class RootFlatObjectFieldTypeTests extends FieldTypeTestCase {
 public void testRegexpQuery() {
 RootFlatObjectFieldType ft = createDefaultFieldType();
-Query expected = new RegexpQuery87(new Term("field", "val.*"));
+Query expected = new RegexpQuery(new Term("field", "val.*"));
 Query actual = ft.regexpQuery("val.*", 0, 0, 10, null, MOCK_QSC);
 assertEquals(expected, actual);

View File

@ -1 +0,0 @@
de97930c2f5242124e52579eb3d23722e13f0f9d

View File

@ -0,0 +1 @@
331c1607af5fea2251e8c3cd6a0bce04f3ec2bc6

View File

@@ -29,13 +29,13 @@ import org.apache.lucene.search.MultiTermQuery;
 import org.apache.lucene.search.MultiTermQuery.RewriteMethod;
 import org.apache.lucene.search.PrefixQuery;
 import org.apache.lucene.search.Query;
-import org.apache.lucene.search.RegExp87;
-import org.apache.lucene.search.RegExp87.Kind;
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.search.TermRangeQuery;
 import org.apache.lucene.search.WildcardQuery;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.automaton.Automaton;
+import org.apache.lucene.util.automaton.RegExp;
+import org.apache.lucene.util.automaton.RegExp.Kind;
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.common.geo.ShapeRelation;
 import org.elasticsearch.common.lucene.BytesRefs;
@@ -301,7 +301,7 @@ public class WildcardFieldMapper extends FieldMapper {
 return new MatchNoDocsQuery();
 }
-RegExp87 ngramRegex = new RegExp87(addLineEndChars(toLowerCase(value)), syntaxFlags, matchFlags);
+RegExp ngramRegex = new RegExp(addLineEndChars(toLowerCase(value)), syntaxFlags, matchFlags);
 Query approxBooleanQuery = toApproximationQuery(ngramRegex);
 Query approxNgramQuery = rewriteBoolToNgramQuery(approxBooleanQuery);
@@ -312,7 +312,7 @@
 return existsQuery(context);
 }
 Supplier<Automaton> deferredAutomatonSupplier = ()-> {
-RegExp87 regex = new RegExp87(value, syntaxFlags, matchFlags);
+RegExp regex = new RegExp(value, syntaxFlags, matchFlags);
 return regex.toAutomaton(maxDeterminizedStates);
 };
@@ -341,7 +341,7 @@
 // * If an expression resolves to a RegExpQuery eg ?? then only the verification
 // query is run.
 // * Anything else is a concrete query that should be run on the ngram index.
-public static Query toApproximationQuery(RegExp87 r) throws IllegalArgumentException {
+public static Query toApproximationQuery(RegExp r) throws IllegalArgumentException {
 Query result = null;
 switch (r.kind) {
 case REGEXP_UNION:
@@ -402,7 +402,7 @@
 return result;
 }
-private static Query createConcatenationQuery(RegExp87 r) {
+private static Query createConcatenationQuery(RegExp r) {
 // Create ANDs of expressions plus collapse consecutive TermQuerys into single longer ones
 ArrayList<Query> queries = new ArrayList<>();
 findLeaves(r.exp1, Kind.REGEXP_CONCATENATION, queries);
@@ -433,7 +433,7 @@
 }
-private static Query createUnionQuery(RegExp87 r) {
+private static Query createUnionQuery(RegExp r) {
 // Create an OR of clauses
 ArrayList<Query> queries = new ArrayList<>();
 findLeaves(r.exp1, Kind.REGEXP_UNION, queries);
@@ -460,7 +460,7 @@
 return new MatchAllButRequireVerificationQuery();
 }
-private static void findLeaves(RegExp87 exp, Kind kind, List<Query> queries) {
+private static void findLeaves(RegExp exp, Kind kind, List<Query> queries) {
 if (exp.kind == kind) {
 findLeaves(exp.exp1, kind, queries);
 findLeaves( exp.exp2, kind, queries);
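For reference, a minimal sketch of walking a parsed RegExp tree through the public kind/exp1/exp2 fields used above, in the spirit of the findLeaves() helper (the pattern and the printing are illustrative assumptions; the real code collects sub-queries instead):

import org.apache.lucene.util.automaton.RegExp;
import org.apache.lucene.util.automaton.RegExp.Kind;

public class RegExpTreeSketch {
    // Recurse into nodes of the requested kind and report their leaves.
    static void printLeaves(RegExp exp, Kind kind) {
        if (exp.kind == kind) {
            printLeaves(exp.exp1, kind);
            printLeaves(exp.exp2, kind);
        } else {
            System.out.println(exp.kind + " -> " + exp);
        }
    }

    public static void main(String[] args) {
        RegExp parsed = new RegExp("foo|bar|baz.*");
        printLeaves(parsed, Kind.REGEXP_UNION);
    }
}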

View File

@@ -26,7 +26,6 @@ import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.MatchAllDocsQuery;
 import org.apache.lucene.search.MatchNoDocsQuery;
 import org.apache.lucene.search.Query;
-import org.apache.lucene.search.RegExp87;
 import org.apache.lucene.search.ScoreDoc;
 import org.apache.lucene.search.Sort;
 import org.apache.lucene.search.SortField;
@@ -38,6 +37,7 @@ import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.automaton.Automaton;
 import org.apache.lucene.util.automaton.ByteRunAutomaton;
+import org.apache.lucene.util.automaton.RegExp;
 import org.elasticsearch.Version;
 import org.elasticsearch.cluster.metadata.IndexMetadata;
 import org.elasticsearch.common.collect.List;
@@ -170,7 +170,7 @@ public class WildcardFieldMapperTests extends ESTestCase {
 assertThat(wildcardFieldTopDocs.totalHits.value, equalTo(0L));
 // Test regexp query
-wildcardFieldQuery = wildcardFieldType.fieldType().regexpQuery(queryString, RegExp87.ALL, 0, 20000, null, MOCK_QSC);
+wildcardFieldQuery = wildcardFieldType.fieldType().regexpQuery(queryString, RegExp.ALL, 0, 20000, null, MOCK_QSC);
 wildcardFieldTopDocs = searcher.search(wildcardFieldQuery, 10, Sort.INDEXORDER);
 assertThat(wildcardFieldTopDocs.totalHits.value, equalTo(0L));
@@ -227,8 +227,8 @@
 break;
 case 1:
 pattern = getRandomRegexPattern(values);
-wildcardFieldQuery = wildcardFieldType.fieldType().regexpQuery(pattern, RegExp87.ALL, 0, 20000, null, MOCK_QSC);
-keywordFieldQuery = keywordFieldType.fieldType().regexpQuery(pattern, RegExp87.ALL, 0,20000, null, MOCK_QSC);
+wildcardFieldQuery = wildcardFieldType.fieldType().regexpQuery(pattern, RegExp.ALL, 0, 20000, null, MOCK_QSC);
+keywordFieldQuery = keywordFieldType.fieldType().regexpQuery(pattern, RegExp.ALL, 0,20000, null, MOCK_QSC);
 break;
 case 2:
 pattern = randomABString(5);
@@ -381,12 +381,12 @@
 // All these expressions should rewrite to a match all with no verification step required at all
 String superfastRegexes[]= { ".*", "...*..", "(foo|bar|.*)", "@"};
 for (String regex : superfastRegexes) {
-Query wildcardFieldQuery = wildcardFieldType.fieldType().regexpQuery(regex, RegExp87.ALL, 0, 20000, null, MOCK_QSC);
+Query wildcardFieldQuery = wildcardFieldType.fieldType().regexpQuery(regex, RegExp.ALL, 0, 20000, null, MOCK_QSC);
 assertTrue(wildcardFieldQuery instanceof DocValuesFieldExistsQuery);
 }
 String matchNoDocsRegexes[]= { ""};
 for (String regex : matchNoDocsRegexes) {
-Query wildcardFieldQuery = wildcardFieldType.fieldType().regexpQuery(regex, RegExp87.ALL, 0, 20000, null, MOCK_QSC);
+Query wildcardFieldQuery = wildcardFieldType.fieldType().regexpQuery(regex, RegExp.ALL, 0, 20000, null, MOCK_QSC);
 assertTrue(wildcardFieldQuery instanceof MatchNoDocsQuery);
 }
@@ -406,7 +406,7 @@
 for (String[] test : acceleratedTests) {
 String regex = test[0];
 String expectedAccelerationQueryString = test[1].replaceAll("_", ""+WildcardFieldMapper.TOKEN_START_OR_END_CHAR);
-Query wildcardFieldQuery = wildcardFieldType.fieldType().regexpQuery(regex, RegExp87.ALL, 0, 20000, null, MOCK_QSC);
+Query wildcardFieldQuery = wildcardFieldType.fieldType().regexpQuery(regex, RegExp.ALL, 0, 20000, null, MOCK_QSC);
 testExpectedAccelerationQuery(regex, wildcardFieldQuery, expectedAccelerationQueryString);
 }
@@ -414,7 +414,7 @@
 // TODO we can possibly improve on some of these
 String matchAllButVerifyTests[]= { "..", "(a)?","(a|b){0,3}", "((foo)?|(foo|bar)?)", "@&~(abc.+)", "aaa.+&.+bbb"};
 for (String regex : matchAllButVerifyTests) {
-Query wildcardFieldQuery = wildcardFieldType.fieldType().regexpQuery(regex, RegExp87.ALL, 0, 20000, null, MOCK_QSC);
+Query wildcardFieldQuery = wildcardFieldType.fieldType().regexpQuery(regex, RegExp.ALL, 0, 20000, null, MOCK_QSC);
 assertTrue(regex +" was not a pure verify query " +formatQuery(wildcardFieldQuery),
 wildcardFieldQuery instanceof AutomatonQueryOnBinaryDv);
 }
@@ -430,7 +430,7 @@
 for (String[] test : suboptimalTests) {
 String regex = test[0];
 String expectedAccelerationQueryString = test[1].replaceAll("_", ""+WildcardFieldMapper.TOKEN_START_OR_END_CHAR);
-Query wildcardFieldQuery = wildcardFieldType.fieldType().regexpQuery(regex, RegExp87.ALL, 0, 20000, null, MOCK_QSC);
+Query wildcardFieldQuery = wildcardFieldType.fieldType().regexpQuery(regex, RegExp.ALL, 0, 20000, null, MOCK_QSC);
 testExpectedAccelerationQuery(regex, wildcardFieldQuery, expectedAccelerationQueryString);
 }
@@ -769,7 +769,7 @@
 }
 //Assert our randomly generated regex actually matches the provided raw input.
-RegExp87 regex = new RegExp87(result.toString());
+RegExp regex = new RegExp(result.toString());
 Automaton automaton = regex.toAutomaton();
 ByteRunAutomaton bytesMatcher = new ByteRunAutomaton(automaton);
 BytesRef br = new BytesRef(randomValue);