Query refactoring: MatchQueryBuilder
This adds equals, hashCode, and read/write methods, separates toQuery from JSON parsing, and adds tests. It also moves MatchQueryBuilder.Type to MatchQuery and adds serialization, hashCode, and equals there. Relates to #10217
This commit is contained in:
parent 5cc423a229
commit 90fac17a2d
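For orientation, here is a minimal usage sketch of the refactored builder. It is not part of the diff; the field name "message" and the concrete values are made up for illustration, and only setters that appear in the diff below are used.

    // Sketch only: fluent setters and the relocated enums introduced by this commit.
    // "message" and all values are illustrative, not taken from the change itself.
    MatchQueryBuilder query = new MatchQueryBuilder("message", "quick brown fox")
            .type(MatchQuery.Type.BOOLEAN)                  // Type now lives on MatchQuery
            .operator(Operator.AND)
            .slop(0)
            .zeroTermsQuery(MatchQuery.ZeroTermsQuery.NONE) // ZeroTermsQuery moved as well
            .cutoffFrequency(0.01f);

    // equals/hashCode are newly implemented, so an identically configured builder compares equal.
    MatchQueryBuilder same = new MatchQueryBuilder("message", "quick brown fox")
            .type(MatchQuery.Type.BOOLEAN)
            .operator(Operator.AND)
            .slop(0)
            .zeroTermsQuery(MatchQuery.ZeroTermsQuery.NONE)
            .cutoffFrequency(0.01f);
    assert query.equals(same) && query.hashCode() == same.hashCode();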
@ -76,6 +76,10 @@ public class ExtendedCommonTermsQuery extends CommonTermsQuery {
|
|||
return lowFreqMinNumShouldMatchSpec;
|
||||
}
|
||||
|
||||
public float getMaxTermFrequency() {
|
||||
return this.maxTermFrequency;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Query newTermQuery(Term term, TermContext context) {
|
||||
if (fieldType == null) {
|
||||
@ -19,11 +19,21 @@
|
|||
|
||||
package org.elasticsearch.index.query;
|
||||
|
||||
import org.apache.lucene.queries.ExtendedCommonTermsQuery;
|
||||
import org.apache.lucene.search.BooleanQuery;
|
||||
import org.apache.lucene.search.FuzzyQuery;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.lucene.search.Queries;
|
||||
import org.elasticsearch.common.unit.Fuzziness;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.index.query.support.QueryParsers;
|
||||
import org.elasticsearch.index.search.MatchQuery;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Locale;
|
||||
import java.util.Objects;
|
||||
|
||||
/**
|
||||
* Match query is a query that analyzes the text and constructs a query as the result of the analysis. It
|
||||
|
@ -31,80 +41,90 @@ import java.util.Locale;
|
|||
*/
|
||||
public class MatchQueryBuilder extends AbstractQueryBuilder<MatchQueryBuilder> {
|
||||
|
||||
/** The default name for the match query */
|
||||
public static final String NAME = "match";
|
||||
|
||||
public enum Type {
|
||||
/**
|
||||
* The text is analyzed and terms are added to a boolean query.
|
||||
*/
|
||||
BOOLEAN,
|
||||
/**
|
||||
* The text is analyzed and used as a phrase query.
|
||||
*/
|
||||
PHRASE,
|
||||
/**
|
||||
* The text is analyzed and used in a phrase query, with the last term acting as a prefix.
|
||||
*/
|
||||
PHRASE_PREFIX
|
||||
}
|
||||
/** The default mode terms are combined in a match query */
|
||||
public static final Operator DEFAULT_OPERATOR = Operator.OR;
|
||||
|
||||
public enum ZeroTermsQuery {
|
||||
NONE,
|
||||
ALL
|
||||
}
|
||||
/** The default mode match query type */
|
||||
public static final MatchQuery.Type DEFAULT_TYPE = MatchQuery.Type.BOOLEAN;
|
||||
|
||||
private final String name;
|
||||
private final String fieldName;
|
||||
|
||||
private final Object text;
|
||||
private final Object value;
|
||||
|
||||
private Type type;
|
||||
private MatchQuery.Type type = DEFAULT_TYPE;
|
||||
|
||||
private Operator operator;
|
||||
private Operator operator = DEFAULT_OPERATOR;
|
||||
|
||||
private String analyzer;
|
||||
|
||||
private Integer slop;
|
||||
private int slop = MatchQuery.DEFAULT_PHRASE_SLOP;
|
||||
|
||||
private Fuzziness fuzziness;
|
||||
private Fuzziness fuzziness = null;
|
||||
|
||||
private Integer prefixLength;
|
||||
private int prefixLength = FuzzyQuery.defaultPrefixLength;
|
||||
|
||||
private Integer maxExpansions;
|
||||
private int maxExpansions = FuzzyQuery.defaultMaxExpansions;
|
||||
|
||||
private boolean fuzzyTranspositions = FuzzyQuery.defaultTranspositions;
|
||||
|
||||
private String minimumShouldMatch;
|
||||
|
||||
private String fuzzyRewrite = null;
|
||||
|
||||
private Boolean lenient;
|
||||
private boolean lenient = MatchQuery.DEFAULT_LENIENCY;
|
||||
|
||||
private Boolean fuzzyTranspositions = null;
|
||||
private MatchQuery.ZeroTermsQuery zeroTermsQuery = MatchQuery.DEFAULT_ZERO_TERMS_QUERY;
|
||||
|
||||
private ZeroTermsQuery zeroTermsQuery;
|
||||
private Float cutoffFrequency = null;
|
||||
|
||||
private Float cutoff_Frequency = null;
|
||||
|
||||
static final MatchQueryBuilder PROTOTYPE = new MatchQueryBuilder(null, null);
|
||||
static final MatchQueryBuilder PROTOTYPE = new MatchQueryBuilder("","");
|
||||
|
||||
/**
|
||||
* Constructs a new text query.
|
||||
* Constructs a new match query.
|
||||
*/
|
||||
public MatchQueryBuilder(String name, Object text) {
|
||||
this.name = name;
|
||||
this.text = text;
|
||||
public MatchQueryBuilder(String fieldName, Object value) {
|
||||
if (fieldName == null) {
|
||||
throw new IllegalArgumentException("[" + NAME + "] requires fieldName");
|
||||
}
|
||||
if (value == null) {
|
||||
throw new IllegalArgumentException("[" + NAME + "] requires query value");
|
||||
}
|
||||
this.fieldName = fieldName;
|
||||
this.value = value;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the type of the text query.
|
||||
*/
|
||||
public MatchQueryBuilder type(Type type) {
|
||||
/** Returns the field name used in this query. */
|
||||
public String fieldName() {
|
||||
return this.fieldName;
|
||||
}
|
||||
|
||||
/** Returns the value used in this query. */
|
||||
public Object value() {
|
||||
return this.value;
|
||||
}
|
||||
|
||||
/** Sets the type of the text query. */
|
||||
public MatchQueryBuilder type(MatchQuery.Type type) {
|
||||
if (type == null) {
|
||||
throw new IllegalArgumentException("[" + NAME + "] requires type to be non-null");
|
||||
}
|
||||
this.type = type;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the operator to use when using a boolean query. Defaults to <tt>OR</tt>.
|
||||
*/
|
||||
/** Get the type of the query. */
|
||||
public MatchQuery.Type type() {
|
||||
return this.type;
|
||||
}
|
||||
|
||||
/** Sets the operator to use when using a boolean query. Defaults to <tt>OR</tt>. */
|
||||
public MatchQueryBuilder operator(Operator operator) {
|
||||
if (operator == null) {
|
||||
throw new IllegalArgumentException("[" + NAME + "] requires operator to be non-null");
|
||||
}
|
||||
this.operator = operator;
|
||||
return this;
|
||||
}
|
||||
|
@ -118,27 +138,56 @@ public class MatchQueryBuilder extends AbstractQueryBuilder<MatchQueryBuilder> {
|
|||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the phrase slop if evaluated to a phrase query type.
|
||||
*/
|
||||
/** Get the analyzer to use, if previously set, otherwise <tt>null</tt> */
|
||||
public String analyzer() {
|
||||
return this.analyzer;
|
||||
}
|
||||
|
||||
/** Sets a slop factor for phrase queries */
|
||||
public MatchQueryBuilder slop(int slop) {
|
||||
if (slop < 0 ) {
|
||||
throw new IllegalArgumentException("No negative slop allowed.");
|
||||
}
|
||||
this.slop = slop;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the fuzziness used when evaluated to a fuzzy query type. Defaults to "AUTO".
|
||||
*/
|
||||
/** Get the slop factor for phrase queries. */
|
||||
public int slop() {
|
||||
return this.slop;
|
||||
}
|
||||
|
||||
/** Sets the fuzziness used when evaluated to a fuzzy query type. Defaults to "AUTO". */
|
||||
public MatchQueryBuilder fuzziness(Object fuzziness) {
|
||||
this.fuzziness = Fuzziness.build(fuzziness);
|
||||
return this;
|
||||
}
|
||||
|
||||
/** Gets the fuzziness used when evaluated to a fuzzy query type. */
|
||||
public Fuzziness fuzziness() {
|
||||
return this.fuzziness;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the length of the common (non-fuzzy) prefix for fuzzy match queries
|
||||
* @param prefixLength non-negative length of prefix
|
||||
* @throws IllegalArgumentException in case the prefix is negative
|
||||
*/
|
||||
public MatchQueryBuilder prefixLength(int prefixLength) {
|
||||
if (prefixLength < 0 ) {
|
||||
throw new IllegalArgumentException("No negative prefix length allowed.");
|
||||
}
|
||||
this.prefixLength = prefixLength;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the length of the common (non-fuzzy) prefix for fuzzy match queries
|
||||
*/
|
||||
public int prefixLength() {
|
||||
return this.prefixLength;
|
||||
}
|
||||
|
||||
/**
|
||||
* When using fuzzy or prefix type query, the number of term expansions to use. Defaults to unbounded
|
||||
* so it is recommended to set it to a reasonable value for faster execution.
|
||||
|
@ -149,95 +198,259 @@ public class MatchQueryBuilder extends AbstractQueryBuilder<MatchQueryBuilder> {
|
|||
}
|
||||
|
||||
/**
|
||||
* Set a cutoff value in [0..1] (or absolute number >=1) representing the
|
||||
* Get the (optional) number of term expansions when using fuzzy or prefix type query.
|
||||
*/
|
||||
public int maxExpansions() {
|
||||
return this.maxExpansions;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets an optional cutoff value in [0..1] (or absolute number >=1) representing the
|
||||
* maximum threshold of a term's document frequency to be considered a low
|
||||
* frequency term.
|
||||
*/
|
||||
public MatchQueryBuilder cutoffFrequency(float cutoff) {
|
||||
this.cutoff_Frequency = cutoff;
|
||||
this.cutoffFrequency = cutoff;
|
||||
return this;
|
||||
}
|
||||
|
||||
/** Gets the optional cutoff value, can be <tt>null</tt> if not set previously */
|
||||
public Float cutoffFrequency() {
|
||||
return this.cutoffFrequency;
|
||||
}
|
||||
|
||||
/** Sets optional minimumShouldMatch value to apply to the query */
|
||||
public MatchQueryBuilder minimumShouldMatch(String minimumShouldMatch) {
|
||||
this.minimumShouldMatch = minimumShouldMatch;
|
||||
return this;
|
||||
}
|
||||
|
||||
/** Gets the minimumShouldMatch value */
|
||||
public String minimumShouldMatch() {
|
||||
return this.minimumShouldMatch;
|
||||
}
|
||||
|
||||
/** Sets the fuzzy_rewrite parameter controlling how the fuzzy query will get rewritten */
|
||||
public MatchQueryBuilder fuzzyRewrite(String fuzzyRewrite) {
|
||||
this.fuzzyRewrite = fuzzyRewrite;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the fuzzy_rewrite parameter
|
||||
* @see #fuzzyRewrite(String)
|
||||
*/
|
||||
public String fuzzyRewrite() {
|
||||
return this.fuzzyRewrite;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets whether transpositions are supported in fuzzy queries.<p>
|
||||
* The default metric used by fuzzy queries to determine a match is the Damerau-Levenshtein
|
||||
* distance formula which supports transpositions. Setting transposition to false will
|
||||
* switch to classic Levenshtein distance.<br>
|
||||
* If not set, Damerau-Levenshtein distance metric will be used.
|
||||
*/
|
||||
public MatchQueryBuilder fuzzyTranspositions(boolean fuzzyTranspositions) {
|
||||
//LUCENE 4 UPGRADE add documentation
|
||||
this.fuzzyTranspositions = fuzzyTranspositions;
|
||||
return this;
|
||||
}
|
||||
|
||||
/** Gets the fuzzy query transposition setting. */
|
||||
public boolean fuzzyTranspositions() {
|
||||
return this.fuzzyTranspositions;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets whether format based failures will be ignored.
|
||||
* @deprecated use #lenient() instead
|
||||
*/
|
||||
@Deprecated
|
||||
public MatchQueryBuilder setLenient(boolean lenient) {
|
||||
return lenient(lenient);
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets whether format based failures will be ignored.
|
||||
*/
|
||||
public MatchQueryBuilder setLenient(boolean lenient) {
|
||||
public MatchQueryBuilder lenient(boolean lenient) {
|
||||
this.lenient = lenient;
|
||||
return this;
|
||||
}
|
||||
|
||||
public MatchQueryBuilder zeroTermsQuery(ZeroTermsQuery zeroTermsQuery) {
|
||||
/**
|
||||
* Gets leniency setting that controls if format based failures will be ignored.
|
||||
*/
|
||||
public boolean lenient() {
|
||||
return this.lenient;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets query to use in case no query terms are available, e.g. after analysis removed them.
|
||||
* Defaults to {@link MatchQuery.ZeroTermsQuery#NONE}, but can be set to
|
||||
* {@link MatchQuery.ZeroTermsQuery#ALL} instead.
|
||||
*/
|
||||
public MatchQueryBuilder zeroTermsQuery(MatchQuery.ZeroTermsQuery zeroTermsQuery) {
|
||||
this.zeroTermsQuery = zeroTermsQuery;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the setting for handling zero terms queries.
|
||||
* @see #zeroTermsQuery(ZeroTermsQuery)
|
||||
*/
|
||||
public MatchQuery.ZeroTermsQuery zeroTermsQuery() {
|
||||
return this.zeroTermsQuery;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void doXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject(NAME);
|
||||
builder.startObject(name);
|
||||
builder.startObject(fieldName);
|
||||
|
||||
builder.field("query", text);
|
||||
if (type != null) {
|
||||
builder.field("query", value);
|
||||
builder.field("type", type.toString().toLowerCase(Locale.ENGLISH));
|
||||
}
|
||||
if (operator != null) {
|
||||
builder.field("operator", operator.toString());
|
||||
}
|
||||
if (analyzer != null) {
|
||||
builder.field("analyzer", analyzer);
|
||||
}
|
||||
if (slop != null) {
|
||||
builder.field("slop", slop);
|
||||
}
|
||||
if (fuzziness != null) {
|
||||
fuzziness.toXContent(builder, params);
|
||||
}
|
||||
if (prefixLength != null) {
|
||||
builder.field("prefix_length", prefixLength);
|
||||
}
|
||||
if (maxExpansions != null) {
|
||||
builder.field("max_expansions", maxExpansions);
|
||||
}
|
||||
if (minimumShouldMatch != null) {
|
||||
builder.field("minimum_should_match", minimumShouldMatch);
|
||||
}
|
||||
if (fuzzyRewrite != null) {
|
||||
builder.field("fuzzy_rewrite", fuzzyRewrite);
|
||||
}
|
||||
if (fuzzyTranspositions != null) {
|
||||
// LUCENE 4 UPGRADE we need to document this & test this
|
||||
builder.field("fuzzy_transpositions", fuzzyTranspositions);
|
||||
}
|
||||
if (lenient != null) {
|
||||
builder.field("lenient", lenient);
|
||||
}
|
||||
if (zeroTermsQuery != null) {
|
||||
builder.field("zero_terms_query", zeroTermsQuery.toString());
|
||||
}
|
||||
if (cutoff_Frequency != null) {
|
||||
builder.field("cutoff_frequency", cutoff_Frequency);
|
||||
if (cutoffFrequency != null) {
|
||||
builder.field("cutoff_frequency", cutoffFrequency);
|
||||
}
|
||||
printBoostAndQueryName(builder);
|
||||
builder.endObject();
|
||||
builder.endObject();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Query doToQuery(QueryShardContext context) throws IOException {
|
||||
// validate context specific fields
|
||||
if (analyzer != null && context.analysisService().analyzer(analyzer) == null) {
|
||||
throw new QueryShardException(context, "[match] analyzer [" + analyzer + "] not found");
|
||||
}
|
||||
|
||||
MatchQuery matchQuery = new MatchQuery(context);
|
||||
matchQuery.setOccur(operator.toBooleanClauseOccur());
|
||||
matchQuery.setAnalyzer(analyzer);
|
||||
matchQuery.setPhraseSlop(slop);
|
||||
matchQuery.setFuzziness(fuzziness);
|
||||
matchQuery.setFuzzyPrefixLength(prefixLength);
|
||||
matchQuery.setMaxExpansions(maxExpansions);
|
||||
matchQuery.setTranspositions(fuzzyTranspositions);
|
||||
matchQuery.setFuzzyRewriteMethod(QueryParsers.parseRewriteMethod(context.parseFieldMatcher(), fuzzyRewrite, null));
|
||||
matchQuery.setLenient(lenient);
|
||||
matchQuery.setCommonTermsCutoff(cutoffFrequency);
|
||||
matchQuery.setZeroTermsQuery(zeroTermsQuery);
|
||||
|
||||
Query query = matchQuery.parse(type, fieldName, value);
|
||||
if (query == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (query instanceof BooleanQuery) {
|
||||
query = Queries.applyMinimumShouldMatch((BooleanQuery) query, minimumShouldMatch);
|
||||
} else if (query instanceof ExtendedCommonTermsQuery) {
|
||||
((ExtendedCommonTermsQuery)query).setLowFreqMinimumNumberShouldMatch(minimumShouldMatch);
|
||||
}
|
||||
return query;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean doEquals(MatchQueryBuilder other) {
|
||||
return Objects.equals(fieldName, other.fieldName) &&
|
||||
Objects.equals(value, other.value) &&
|
||||
Objects.equals(type, other.type) &&
|
||||
Objects.equals(operator, other.operator) &&
|
||||
Objects.equals(analyzer, other.analyzer) &&
|
||||
Objects.equals(slop, other.slop) &&
|
||||
Objects.equals(fuzziness, other.fuzziness) &&
|
||||
Objects.equals(prefixLength, other.prefixLength) &&
|
||||
Objects.equals(maxExpansions, other.maxExpansions) &&
|
||||
Objects.equals(minimumShouldMatch, other.minimumShouldMatch) &&
|
||||
Objects.equals(fuzzyRewrite, other.fuzzyRewrite) &&
|
||||
Objects.equals(lenient, other.lenient) &&
|
||||
Objects.equals(fuzzyTranspositions, other.fuzzyTranspositions) &&
|
||||
Objects.equals(zeroTermsQuery, other.zeroTermsQuery) &&
|
||||
Objects.equals(cutoffFrequency, other.cutoffFrequency);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected int doHashCode() {
|
||||
return Objects.hash(fieldName, value, type, operator, analyzer, slop,
|
||||
fuzziness, prefixLength, maxExpansions, minimumShouldMatch,
|
||||
fuzzyRewrite, lenient, fuzzyTranspositions, zeroTermsQuery, cutoffFrequency);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected MatchQueryBuilder doReadFrom(StreamInput in) throws IOException {
|
||||
MatchQueryBuilder matchQuery = new MatchQueryBuilder(in.readString(), in.readGenericValue());
|
||||
matchQuery.type = MatchQuery.Type.readTypeFrom(in);
|
||||
matchQuery.operator = Operator.readOperatorFrom(in);
|
||||
matchQuery.slop = in.readVInt();
|
||||
matchQuery.prefixLength = in.readVInt();
|
||||
matchQuery.maxExpansions = in.readVInt();
|
||||
matchQuery.fuzzyTranspositions = in.readBoolean();
|
||||
matchQuery.lenient = in.readBoolean();
|
||||
matchQuery.zeroTermsQuery = MatchQuery.ZeroTermsQuery.readZeroTermsQueryFrom(in);
|
||||
// optional fields
|
||||
matchQuery.analyzer = in.readOptionalString();
|
||||
matchQuery.minimumShouldMatch = in.readOptionalString();
|
||||
matchQuery.fuzzyRewrite = in.readOptionalString();
|
||||
if (in.readBoolean()) {
|
||||
matchQuery.fuzziness = Fuzziness.readFuzzinessFrom(in);
|
||||
}
|
||||
if (in.readBoolean()) {
|
||||
matchQuery.cutoffFrequency = in.readFloat();
|
||||
}
|
||||
return matchQuery;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void doWriteTo(StreamOutput out) throws IOException {
|
||||
out.writeString(fieldName);
|
||||
out.writeGenericValue(value);
|
||||
type.writeTo(out);
|
||||
operator.writeTo(out);
|
||||
out.writeVInt(slop);
|
||||
out.writeVInt(prefixLength);
|
||||
out.writeVInt(maxExpansions);
|
||||
out.writeBoolean(fuzzyTranspositions);
|
||||
out.writeBoolean(lenient);
|
||||
zeroTermsQuery.writeTo(out);
|
||||
// optional fields
|
||||
out.writeOptionalString(analyzer);
|
||||
out.writeOptionalString(minimumShouldMatch);
|
||||
out.writeOptionalString(fuzzyRewrite);
|
||||
if (fuzziness == null) {
|
||||
out.writeBoolean(false);
|
||||
} else {
|
||||
out.writeBoolean(true);
|
||||
fuzziness.writeTo(out);
|
||||
}
|
||||
if (cutoffFrequency == null) {
|
||||
out.writeBoolean(false);
|
||||
} else {
|
||||
out.writeBoolean(true);
|
||||
out.writeFloat(cutoffFrequency);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getWriteableName() {
|
||||
return NAME;
|
||||
@ -19,22 +19,19 @@
|
|||
|
||||
package org.elasticsearch.index.query;
|
||||
|
||||
import org.apache.lucene.queries.ExtendedCommonTermsQuery;
|
||||
import org.apache.lucene.search.BooleanQuery;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.search.FuzzyQuery;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.lucene.search.Queries;
|
||||
import org.elasticsearch.common.unit.Fuzziness;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.query.support.QueryParsers;
|
||||
import org.elasticsearch.index.search.MatchQuery;
|
||||
import org.elasticsearch.index.search.MatchQuery.ZeroTermsQuery;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
public class MatchQueryParser extends BaseQueryParserTemp {
|
||||
public class MatchQueryParser extends BaseQueryParser {
|
||||
|
||||
@Inject
|
||||
public MatchQueryParser() {
|
||||
|
@ -48,8 +45,7 @@ public class MatchQueryParser extends BaseQueryParserTemp {
|
|||
}
|
||||
|
||||
@Override
|
||||
public Query parse(QueryShardContext context) throws IOException, QueryParsingException {
|
||||
QueryParseContext parseContext = context.parseContext();
|
||||
public MatchQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException, QueryParsingException {
|
||||
XContentParser parser = parseContext.parser();
|
||||
|
||||
MatchQuery.Type type = MatchQuery.Type.BOOLEAN;
|
||||
|
@ -69,8 +65,18 @@ public class MatchQueryParser extends BaseQueryParserTemp {
|
|||
|
||||
Object value = null;
|
||||
float boost = AbstractQueryBuilder.DEFAULT_BOOST;
|
||||
MatchQuery matchQuery = new MatchQuery(context);
|
||||
String minimumShouldMatch = null;
|
||||
String analyzer = null;
|
||||
Operator operator = MatchQueryBuilder.DEFAULT_OPERATOR;
|
||||
int slop = MatchQuery.DEFAULT_PHRASE_SLOP;
|
||||
Fuzziness fuzziness = null;
|
||||
int prefixLength = FuzzyQuery.defaultPrefixLength;
|
||||
int maxExpansion = FuzzyQuery.defaultMaxExpansions;
|
||||
boolean fuzzyTranspositions = FuzzyQuery.defaultTranspositions;
|
||||
String fuzzyRewrite = null;
|
||||
boolean lenient = MatchQuery.DEFAULT_LENIENCY;
|
||||
Float cutOffFrequency = null;
|
||||
ZeroTermsQuery zeroTermsQuery = MatchQuery.DEFAULT_ZERO_TERMS_QUERY;
|
||||
String queryName = null;
|
||||
|
||||
token = parser.nextToken();
|
||||
|
@ -94,39 +100,35 @@ public class MatchQueryParser extends BaseQueryParserTemp {
|
|||
throw new QueryParsingException(parseContext, "[match] query does not support type " + tStr);
|
||||
}
|
||||
} else if ("analyzer".equals(currentFieldName)) {
|
||||
String analyzer = parser.text();
|
||||
if (context.analysisService().analyzer(analyzer) == null) {
|
||||
throw new QueryParsingException(parseContext, "[match] analyzer [" + parser.text() + "] not found");
|
||||
}
|
||||
matchQuery.setAnalyzer(analyzer);
|
||||
analyzer = parser.text();
|
||||
} else if ("boost".equals(currentFieldName)) {
|
||||
boost = parser.floatValue();
|
||||
} else if ("slop".equals(currentFieldName) || "phrase_slop".equals(currentFieldName) || "phraseSlop".equals(currentFieldName)) {
|
||||
matchQuery.setPhraseSlop(parser.intValue());
|
||||
slop = parser.intValue();
|
||||
} else if (parseContext.parseFieldMatcher().match(currentFieldName, Fuzziness.FIELD)) {
|
||||
matchQuery.setFuzziness(Fuzziness.parse(parser));
|
||||
fuzziness = Fuzziness.parse(parser);
|
||||
} else if ("prefix_length".equals(currentFieldName) || "prefixLength".equals(currentFieldName)) {
|
||||
matchQuery.setFuzzyPrefixLength(parser.intValue());
|
||||
prefixLength = parser.intValue();
|
||||
} else if ("max_expansions".equals(currentFieldName) || "maxExpansions".equals(currentFieldName)) {
|
||||
matchQuery.setMaxExpansions(parser.intValue());
|
||||
maxExpansion = parser.intValue();
|
||||
} else if ("operator".equals(currentFieldName)) {
|
||||
matchQuery.setOccur(Operator.fromString(parser.text()).toBooleanClauseOccur());
|
||||
operator = Operator.fromString(parser.text());
|
||||
} else if ("minimum_should_match".equals(currentFieldName) || "minimumShouldMatch".equals(currentFieldName)) {
|
||||
minimumShouldMatch = parser.textOrNull();
|
||||
} else if ("fuzzy_rewrite".equals(currentFieldName) || "fuzzyRewrite".equals(currentFieldName)) {
|
||||
matchQuery.setFuzzyRewriteMethod(QueryParsers.parseRewriteMethod(parseContext.parseFieldMatcher(), parser.textOrNull(), null));
|
||||
fuzzyRewrite = parser.textOrNull();
|
||||
} else if ("fuzzy_transpositions".equals(currentFieldName)) {
|
||||
matchQuery.setTranspositions(parser.booleanValue());
|
||||
fuzzyTranspositions = parser.booleanValue();
|
||||
} else if ("lenient".equals(currentFieldName)) {
|
||||
matchQuery.setLenient(parser.booleanValue());
|
||||
lenient = parser.booleanValue();
|
||||
} else if ("cutoff_frequency".equals(currentFieldName)) {
|
||||
matchQuery.setCommonTermsCutoff(parser.floatValue());
|
||||
cutOffFrequency = parser.floatValue();
|
||||
} else if ("zero_terms_query".equals(currentFieldName)) {
|
||||
String zeroTermsDocs = parser.text();
|
||||
if ("none".equalsIgnoreCase(zeroTermsDocs)) {
|
||||
matchQuery.setZeroTermsQuery(MatchQuery.ZeroTermsQuery.NONE);
|
||||
zeroTermsQuery = MatchQuery.ZeroTermsQuery.NONE;
|
||||
} else if ("all".equalsIgnoreCase(zeroTermsDocs)) {
|
||||
matchQuery.setZeroTermsQuery(MatchQuery.ZeroTermsQuery.ALL);
|
||||
zeroTermsQuery = MatchQuery.ZeroTermsQuery.ALL;
|
||||
} else {
|
||||
throw new QueryParsingException(parseContext, "Unsupported zero_terms_docs value [" + zeroTermsDocs + "]");
|
||||
}
|
||||
|
@ -152,21 +154,27 @@ public class MatchQueryParser extends BaseQueryParserTemp {
|
|||
throw new QueryParsingException(parseContext, "No text specified for text query");
|
||||
}
|
||||
|
||||
Query query = matchQuery.parse(type, fieldName, value);
|
||||
if (query == null) {
|
||||
return null;
|
||||
MatchQueryBuilder matchQuery = new MatchQueryBuilder(fieldName, value);
|
||||
matchQuery.operator(operator);
|
||||
matchQuery.type(type);
|
||||
matchQuery.analyzer(analyzer);
|
||||
matchQuery.slop(slop);
|
||||
matchQuery.minimumShouldMatch(minimumShouldMatch);
|
||||
if (fuzziness != null) {
|
||||
matchQuery.fuzziness(fuzziness);
|
||||
}
|
||||
|
||||
if (query instanceof BooleanQuery) {
|
||||
query = Queries.applyMinimumShouldMatch((BooleanQuery) query, minimumShouldMatch);
|
||||
} else if (query instanceof ExtendedCommonTermsQuery) {
|
||||
((ExtendedCommonTermsQuery)query).setLowFreqMinimumNumberShouldMatch(minimumShouldMatch);
|
||||
matchQuery.fuzzyRewrite(fuzzyRewrite);
|
||||
matchQuery.prefixLength(prefixLength);
|
||||
matchQuery.fuzzyTranspositions(fuzzyTranspositions);
|
||||
matchQuery.maxExpansions(maxExpansion);
|
||||
matchQuery.lenient(lenient);
|
||||
if (cutOffFrequency != null) {
|
||||
matchQuery.cutoffFrequency(cutOffFrequency);
|
||||
}
|
||||
query.setBoost(boost);
|
||||
if (queryName != null) {
|
||||
context.addNamedQuery(queryName, query);
|
||||
}
|
||||
return query;
|
||||
matchQuery.zeroTermsQuery(zeroTermsQuery);
|
||||
matchQuery.queryName(queryName);
|
||||
matchQuery.boost(boost);
|
||||
return matchQuery;
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -72,7 +72,7 @@ public class MultiMatchQueryBuilder extends AbstractQueryBuilder<MultiMatchQuery
|
|||
|
||||
private Float cutoffFrequency = null;
|
||||
|
||||
private MatchQueryBuilder.ZeroTermsQuery zeroTermsQuery = null;
|
||||
private MatchQuery.ZeroTermsQuery zeroTermsQuery = null;
|
||||
|
||||
static final MultiMatchQueryBuilder PROTOTYPE = new MultiMatchQueryBuilder(null);
|
||||
|
||||
|
@ -296,7 +296,7 @@ public class MultiMatchQueryBuilder extends AbstractQueryBuilder<MultiMatchQuery
|
|||
}
|
||||
|
||||
|
||||
public MultiMatchQueryBuilder zeroTermsQuery(MatchQueryBuilder.ZeroTermsQuery zeroTermsQuery) {
|
||||
public MultiMatchQueryBuilder zeroTermsQuery(MatchQuery.ZeroTermsQuery zeroTermsQuery) {
|
||||
this.zeroTermsQuery = zeroTermsQuery;
|
||||
return this;
|
||||
}
|
||||
|
|
|
@ -27,6 +27,7 @@ import org.elasticsearch.common.geo.ShapeRelation;
|
|||
import org.elasticsearch.common.geo.builders.ShapeBuilder;
|
||||
import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder;
|
||||
import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder;
|
||||
import org.elasticsearch.index.search.MatchQuery;
|
||||
import org.elasticsearch.script.Script;
|
||||
import org.elasticsearch.script.ScriptService;
|
||||
import org.elasticsearch.script.Template;
|
||||
|
@ -53,7 +54,7 @@ public abstract class QueryBuilders {
|
|||
* @param text The query text (to be analyzed).
|
||||
*/
|
||||
public static MatchQueryBuilder matchQuery(String name, Object text) {
|
||||
return new MatchQueryBuilder(name, text).type(MatchQueryBuilder.Type.BOOLEAN);
|
||||
return new MatchQueryBuilder(name, text).type(MatchQuery.Type.BOOLEAN);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -83,7 +84,7 @@ public abstract class QueryBuilders {
|
|||
* @param text The query text (to be analyzed).
|
||||
*/
|
||||
public static MatchQueryBuilder matchPhraseQuery(String name, Object text) {
|
||||
return new MatchQueryBuilder(name, text).type(MatchQueryBuilder.Type.PHRASE);
|
||||
return new MatchQueryBuilder(name, text).type(MatchQuery.Type.PHRASE);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -93,7 +94,7 @@ public abstract class QueryBuilders {
|
|||
* @param text The query text (to be analyzed).
|
||||
*/
|
||||
public static MatchQueryBuilder matchPhrasePrefixQuery(String name, Object text) {
|
||||
return new MatchQueryBuilder(name, text).type(MatchQueryBuilder.Type.PHRASE_PREFIX);
|
||||
return new MatchQueryBuilder(name, text).type(MatchQuery.Type.PHRASE_PREFIX);
|
||||
}
|
||||
|
||||
/**
|
||||
@ -25,7 +25,11 @@ import org.apache.lucene.queries.ExtendedCommonTermsQuery;
|
|||
import org.apache.lucene.search.*;
|
||||
import org.apache.lucene.search.BooleanClause.Occur;
|
||||
import org.apache.lucene.util.QueryBuilder;
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.io.stream.Writeable;
|
||||
import org.elasticsearch.common.lucene.search.MultiPhrasePrefixQuery;
|
||||
import org.elasticsearch.common.lucene.search.Queries;
|
||||
import org.elasticsearch.common.unit.Fuzziness;
|
||||
|
@ -38,16 +42,90 @@ import java.util.List;
|
|||
|
||||
public class MatchQuery {
|
||||
|
||||
public static enum Type {
|
||||
BOOLEAN,
|
||||
PHRASE,
|
||||
PHRASE_PREFIX
|
||||
public static enum Type implements Writeable<Type> {
|
||||
/**
|
||||
* The text is analyzed and terms are added to a boolean query.
|
||||
*/
|
||||
BOOLEAN(0),
|
||||
/**
|
||||
* The text is analyzed and used as a phrase query.
|
||||
*/
|
||||
PHRASE(1),
|
||||
/**
|
||||
* The text is analyzed and used in a phrase query, with the last term acting as a prefix.
|
||||
*/
|
||||
PHRASE_PREFIX(2);
|
||||
|
||||
private final int ordinal;
|
||||
|
||||
private static final Type PROTOTYPE = BOOLEAN;
|
||||
|
||||
private Type(int ordinal) {
|
||||
this.ordinal = ordinal;
|
||||
}
|
||||
|
||||
public static enum ZeroTermsQuery {
|
||||
NONE,
|
||||
ALL
|
||||
@Override
|
||||
public Type readFrom(StreamInput in) throws IOException {
|
||||
int ord = in.readVInt();
|
||||
for (Type type : Type.values()) {
|
||||
if (type.ordinal == ord) {
|
||||
return type;
|
||||
}
|
||||
}
|
||||
throw new ElasticsearchException("unknown serialized type [" + ord + "]");
|
||||
}
|
||||
|
||||
public static Type readTypeFrom(StreamInput in) throws IOException {
|
||||
return PROTOTYPE.readFrom(in);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
out.writeVInt(this.ordinal);
|
||||
}
|
||||
}
|
||||
|
||||
public static enum ZeroTermsQuery implements Writeable<ZeroTermsQuery> {
|
||||
NONE(0),
|
||||
ALL(1);
|
||||
|
||||
private final int ordinal;
|
||||
|
||||
private static final ZeroTermsQuery PROTOTYPE = NONE;
|
||||
|
||||
private ZeroTermsQuery(int ordinal) {
|
||||
this.ordinal = ordinal;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ZeroTermsQuery readFrom(StreamInput in) throws IOException {
|
||||
int ord = in.readVInt();
|
||||
for (ZeroTermsQuery zeroTermsQuery : ZeroTermsQuery.values()) {
|
||||
if (zeroTermsQuery.ordinal == ord) {
|
||||
return zeroTermsQuery;
|
||||
}
|
||||
}
|
||||
throw new ElasticsearchException("unknown serialized type [" + ord + "]");
|
||||
}
|
||||
|
||||
public static ZeroTermsQuery readZeroTermsQueryFrom(StreamInput in) throws IOException {
|
||||
return PROTOTYPE.readFrom(in);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
out.writeVInt(this.ordinal);
|
||||
}
|
||||
}
|
||||
|
||||
/** the default phrase slop */
|
||||
public static final int DEFAULT_PHRASE_SLOP = 0;
|
||||
|
||||
/** the default leniency setting */
|
||||
public static final boolean DEFAULT_LENIENCY = false;
|
||||
|
||||
/** the default zero terms query */
|
||||
public static final ZeroTermsQuery DEFAULT_ZERO_TERMS_QUERY = ZeroTermsQuery.NONE;
|
||||
|
||||
protected final QueryShardContext context;
|
||||
|
||||
|
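The enums above now implement Writeable, so they serialize as stable ordinals rather than enum names. A rough round-trip sketch follows; the BytesStreamOutput and StreamInput.wrap helpers are assumptions about the surrounding test utilities, not part of this diff.

    // Assumed helpers: BytesStreamOutput / StreamInput.wrap may differ in the target branch.
    BytesStreamOutput out = new BytesStreamOutput();
    MatchQuery.Type.PHRASE.writeTo(out);                 // writes the ordinal 1 as a vInt
    StreamInput in = StreamInput.wrap(out.bytes());
    MatchQuery.Type restored = MatchQuery.Type.readTypeFrom(in);
    assert restored == MatchQuery.Type.PHRASE;           // unknown ordinals throw ElasticsearchException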
@ -57,7 +135,7 @@ public class MatchQuery {
|
|||
|
||||
protected boolean enablePositionIncrements = true;
|
||||
|
||||
protected int phraseSlop = 0;
|
||||
protected int phraseSlop = DEFAULT_PHRASE_SLOP;
|
||||
|
||||
protected Fuzziness fuzziness = null;
|
||||
|
||||
|
@ -69,9 +147,9 @@ public class MatchQuery {
|
|||
|
||||
protected MultiTermQuery.RewriteMethod fuzzyRewriteMethod;
|
||||
|
||||
protected boolean lenient;
|
||||
protected boolean lenient = DEFAULT_LENIENCY;
|
||||
|
||||
protected ZeroTermsQuery zeroTermsQuery = ZeroTermsQuery.NONE;
|
||||
protected ZeroTermsQuery zeroTermsQuery = DEFAULT_ZERO_TERMS_QUERY;
|
||||
|
||||
protected Float commonTermsCutoff = null;
|
||||
|
||||
|
@ -87,8 +165,8 @@ public class MatchQuery {
|
|||
this.occur = occur;
|
||||
}
|
||||
|
||||
public void setCommonTermsCutoff(float cutoff) {
|
||||
this.commonTermsCutoff = Float.valueOf(cutoff);
|
||||
public void setCommonTermsCutoff(Float cutoff) {
|
||||
this.commonTermsCutoff = cutoff;
|
||||
}
|
||||
|
||||
public void setEnablePositionIncrements(boolean enablePositionIncrements) {
|
||||
|
@ -198,7 +276,7 @@ public class MatchQuery {
|
|||
}
|
||||
|
||||
protected Query zeroTermsQuery() {
|
||||
return zeroTermsQuery == ZeroTermsQuery.NONE ? Queries.newMatchNoDocsQuery() : Queries.newMatchAllQuery();
|
||||
return zeroTermsQuery == DEFAULT_ZERO_TERMS_QUERY ? Queries.newMatchNoDocsQuery() : Queries.newMatchAllQuery();
|
||||
}
|
||||
|
||||
private class MatchQueryBuilder extends QueryBuilder {
|
||||
|
@ -0,0 +1,177 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.index.query;
|
||||
|
||||
import org.apache.lucene.index.Term;
|
||||
import org.apache.lucene.queries.ExtendedCommonTermsQuery;
|
||||
import org.apache.lucene.search.BooleanQuery;
|
||||
import org.apache.lucene.search.FuzzyQuery;
|
||||
import org.apache.lucene.search.MatchAllDocsQuery;
|
||||
import org.apache.lucene.search.PhraseQuery;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.search.TermQuery;
|
||||
import org.elasticsearch.common.lucene.BytesRefs;
|
||||
import org.elasticsearch.common.lucene.search.MultiPhrasePrefixQuery;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.elasticsearch.index.search.MatchQuery;
|
||||
import org.elasticsearch.index.search.MatchQuery.ZeroTermsQuery;
|
||||
import org.junit.Test;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Locale;
|
||||
|
||||
import static org.hamcrest.CoreMatchers.either;
|
||||
import static org.hamcrest.CoreMatchers.instanceOf;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.notNullValue;
|
||||
|
||||
public class MatchQueryBuilderTests extends BaseQueryTestCase<MatchQueryBuilder> {
|
||||
|
||||
@Override
|
||||
protected MatchQueryBuilder doCreateTestQueryBuilder() {
|
||||
String fieldName = randomFrom(new String[] { STRING_FIELD_NAME, BOOLEAN_FIELD_NAME, INT_FIELD_NAME, DOUBLE_FIELD_NAME });
|
||||
Object value = "";
|
||||
if (fieldName.equals(STRING_FIELD_NAME)) {
|
||||
int terms = randomIntBetween(0, 3);
|
||||
for (int i = 0; i < terms; i++) {
|
||||
value += randomAsciiOfLengthBetween(1, 10) + " ";
|
||||
}
|
||||
value = ((String) value).trim();
|
||||
} else {
|
||||
value = getRandomValueForFieldName(fieldName);
|
||||
}
|
||||
|
||||
MatchQueryBuilder matchQuery = new MatchQueryBuilder(fieldName, value);
|
||||
matchQuery.type(randomFrom(MatchQuery.Type.values()));
|
||||
matchQuery.operator(randomFrom(Operator.values()));
|
||||
|
||||
if (randomBoolean()) {
|
||||
matchQuery.analyzer(randomFrom("simple", "keyword", "whitespace"));
|
||||
}
|
||||
|
||||
if (randomBoolean()) {
|
||||
matchQuery.slop(randomIntBetween(0, 10));
|
||||
}
|
||||
|
||||
if (randomBoolean()) {
|
||||
matchQuery.fuzziness(randomFuzziness(fieldName));
|
||||
}
|
||||
|
||||
if (randomBoolean()) {
|
||||
matchQuery.prefixLength(randomIntBetween(0, 10));
|
||||
}
|
||||
|
||||
if (randomBoolean()) {
|
||||
matchQuery.minimumShouldMatch(randomMinimumShouldMatch());
|
||||
}
|
||||
|
||||
if (randomBoolean()) {
|
||||
matchQuery.fuzzyRewrite(getRandomRewriteMethod());
|
||||
}
|
||||
|
||||
if (randomBoolean()) {
|
||||
matchQuery.fuzzyTranspositions(randomBoolean());
|
||||
}
|
||||
|
||||
if (randomBoolean()) {
|
||||
matchQuery.lenient(randomBoolean());
|
||||
}
|
||||
|
||||
if (randomBoolean()) {
|
||||
matchQuery.zeroTermsQuery(randomFrom(MatchQuery.ZeroTermsQuery.values()));
|
||||
}
|
||||
|
||||
if (randomBoolean()) {
|
||||
matchQuery.cutoffFrequency((float) 10 / randomIntBetween(1, 100));
|
||||
}
|
||||
return matchQuery;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void doAssertLuceneQuery(MatchQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
|
||||
assertThat(query, notNullValue());
|
||||
|
||||
if (query instanceof MatchAllDocsQuery) {
|
||||
assertThat(queryBuilder.zeroTermsQuery(), equalTo(ZeroTermsQuery.ALL));
|
||||
return;
|
||||
}
|
||||
|
||||
switch (queryBuilder.type()) {
|
||||
case BOOLEAN:
|
||||
assertThat(query, either(instanceOf(BooleanQuery.class)).or(instanceOf(ExtendedCommonTermsQuery.class))
|
||||
.or(instanceOf(TermQuery.class)).or(instanceOf(FuzzyQuery.class)));
|
||||
break;
|
||||
case PHRASE:
|
||||
assertThat(query, either(instanceOf(BooleanQuery.class)).or(instanceOf(PhraseQuery.class))
|
||||
.or(instanceOf(TermQuery.class)).or(instanceOf(FuzzyQuery.class)));
|
||||
break;
|
||||
case PHRASE_PREFIX:
|
||||
assertThat(query, either(instanceOf(BooleanQuery.class)).or(instanceOf(MultiPhrasePrefixQuery.class))
|
||||
.or(instanceOf(TermQuery.class)).or(instanceOf(FuzzyQuery.class)));
|
||||
break;
|
||||
}
|
||||
|
||||
MappedFieldType fieldType = context.fieldMapper(queryBuilder.fieldName());
|
||||
if (query instanceof TermQuery && fieldType != null) {
|
||||
String queryValue = queryBuilder.value().toString();
|
||||
if (queryBuilder.analyzer() == null || queryBuilder.analyzer().equals("simple")) {
|
||||
queryValue = queryValue.toLowerCase(Locale.ROOT);
|
||||
}
|
||||
Query expectedTermQuery = fieldType.termQuery(queryValue, context);
|
||||
// the real query will have the boost applied, so we set it on our expected query as well
|
||||
expectedTermQuery.setBoost(queryBuilder.boost());
|
||||
assertEquals(expectedTermQuery, query);
|
||||
}
|
||||
|
||||
if (query instanceof BooleanQuery) {
|
||||
BooleanQuery bq = (BooleanQuery) query;
|
||||
if (queryBuilder.analyzer() == null && queryBuilder.value().toString().length() > 0) {
|
||||
assertEquals(bq.clauses().size(), queryBuilder.value().toString().split(" ").length);
|
||||
}
|
||||
}
|
||||
|
||||
if (query instanceof ExtendedCommonTermsQuery) {
|
||||
assertTrue(queryBuilder.cutoffFrequency() != null);
|
||||
ExtendedCommonTermsQuery ectq = (ExtendedCommonTermsQuery) query;
|
||||
assertEquals((float) queryBuilder.cutoffFrequency(), ectq.getMaxTermFrequency(), Float.MIN_VALUE);
|
||||
}
|
||||
|
||||
if (query instanceof FuzzyQuery) {
|
||||
assertTrue(queryBuilder.fuzziness() != null);
|
||||
FuzzyQuery fuzzyQuery = (FuzzyQuery) query;
|
||||
fuzzyQuery.getTerm().equals(new Term(STRING_FIELD_NAME, BytesRefs.toBytesRef(queryBuilder.value())));
|
||||
assertThat(queryBuilder.prefixLength(), equalTo(fuzzyQuery.getPrefixLength()));
|
||||
assertThat(queryBuilder.fuzzyTranspositions(), equalTo(fuzzyQuery.getTranspositions()));
|
||||
}
|
||||
}
|
||||
|
||||
@Test(expected = IllegalArgumentException.class)
|
||||
public void testNegativePrefixLengthException() {
|
||||
MatchQueryBuilder matchQuery = new MatchQueryBuilder("fieldName", "text");
|
||||
matchQuery.prefixLength(-1); // not allowed, should trigger exception
|
||||
}
|
||||
|
||||
@Test(expected = QueryShardException.class)
|
||||
public void testBadAnalyzer() throws IOException {
|
||||
MatchQueryBuilder matchQuery = new MatchQueryBuilder("fieldName", "text");
|
||||
matchQuery.analyzer("bogusAnalyzer");
|
||||
matchQuery.doToQuery(createShardContext());
|
||||
}
|
||||
}
@ -30,7 +30,8 @@ import org.elasticsearch.common.xcontent.XContentFactory;
|
|||
import org.elasticsearch.index.query.*;
|
||||
import org.elasticsearch.index.query.IdsQueryBuilder;
|
||||
import org.elasticsearch.index.query.MatchQueryBuilder;
|
||||
import org.elasticsearch.index.query.MatchQueryBuilder.Type;
|
||||
import org.elasticsearch.index.search.MatchQuery.Type;
|
||||
import org.elasticsearch.index.search.MatchQuery;
|
||||
import org.elasticsearch.index.query.MultiMatchQueryBuilder;
|
||||
import org.elasticsearch.index.query.QueryBuilder;
|
||||
import org.elasticsearch.index.query.QueryBuilders;
|
||||
|
@ -1536,7 +1537,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {
|
|||
refresh();
|
||||
|
||||
SearchResponse response = client().prepareSearch("test")
|
||||
.setQuery(QueryBuilders.matchQuery("tags", "long tag").type(MatchQueryBuilder.Type.PHRASE))
|
||||
.setQuery(QueryBuilders.matchQuery("tags", "long tag").type(MatchQuery.Type.PHRASE))
|
||||
.addHighlightedField(new HighlightBuilder.Field("tags")
|
||||
.fragmentSize(-1).numOfFragments(2).fragmenter("simple")).get();
|
||||
|
||||
|
@ -1544,7 +1545,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {
|
|||
assertHighlight(response, 0, "tags", 1, 2, equalTo("here is another one that is very <em>long</em> <em>tag</em> and has the <em>tag</em> token near the end"));
|
||||
|
||||
response = client().prepareSearch("test")
|
||||
.setQuery(QueryBuilders.matchQuery("tags", "long tag").type(MatchQueryBuilder.Type.PHRASE))
|
||||
.setQuery(QueryBuilders.matchQuery("tags", "long tag").type(MatchQuery.Type.PHRASE))
|
||||
.addHighlightedField(new HighlightBuilder.Field("tags")
|
||||
.fragmentSize(-1).numOfFragments(2).fragmenter("span")).get();
|
||||
|
||||
|
@ -1552,7 +1553,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {
|
|||
assertHighlight(response, 0, "tags", 1, 2, equalTo("here is another one that is very <em>long</em> <em>tag</em> and has the <em>tag</em> token near the end"));
|
||||
|
||||
assertFailures(client().prepareSearch("test")
|
||||
.setQuery(QueryBuilders.matchQuery("tags", "long tag").type(MatchQueryBuilder.Type.PHRASE))
|
||||
.setQuery(QueryBuilders.matchQuery("tags", "long tag").type(MatchQuery.Type.PHRASE))
|
||||
.addHighlightedField(new HighlightBuilder.Field("tags")
|
||||
.fragmentSize(-1).numOfFragments(2).fragmenter("invalid")),
|
||||
RestStatus.BAD_REQUEST,
|
||||
|
@ -1607,7 +1608,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {
|
|||
|
||||
// This query used to fail when the field to highlight was absent
|
||||
SearchResponse response = client().prepareSearch("test")
|
||||
.setQuery(QueryBuilders.matchQuery("field", "highlight").type(MatchQueryBuilder.Type.BOOLEAN))
|
||||
.setQuery(QueryBuilders.matchQuery("field", "highlight").type(MatchQuery.Type.BOOLEAN))
|
||||
.addHighlightedField(new HighlightBuilder.Field("highlight_field")
|
||||
.fragmentSize(-1).numOfFragments(1).fragmenter("simple")).get();
|
||||
assertThat(response.getHits().hits()[0].highlightFields().isEmpty(), equalTo(true));
|
||||
|
@ -1627,7 +1628,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {
|
|||
refresh();
|
||||
|
||||
SearchResponse response = client().prepareSearch("test")
|
||||
.setQuery(QueryBuilders.matchQuery("text", "test").type(MatchQueryBuilder.Type.BOOLEAN))
|
||||
.setQuery(QueryBuilders.matchQuery("text", "test").type(MatchQuery.Type.BOOLEAN))
|
||||
.addHighlightedField("text")
|
||||
.addHighlightedField("byte")
|
||||
.addHighlightedField("short")
|
||||
|
@ -1657,7 +1658,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {
|
|||
refresh();
|
||||
|
||||
SearchResponse response = client().prepareSearch("test")
|
||||
.setQuery(QueryBuilders.matchQuery("text", "test").type(MatchQueryBuilder.Type.BOOLEAN))
|
||||
.setQuery(QueryBuilders.matchQuery("text", "test").type(MatchQuery.Type.BOOLEAN))
|
||||
.addHighlightedField("text").execute().actionGet();
|
||||
// PatternAnalyzer will throw an exception if it is reset twice
|
||||
assertHitCount(response, 1l);
|
||||
@ -30,6 +30,7 @@ import org.elasticsearch.index.query.MatchQueryBuilder;
|
|||
import org.elasticsearch.index.query.MultiMatchQueryBuilder;
|
||||
import org.elasticsearch.index.query.Operator;
|
||||
import org.elasticsearch.index.query.QueryBuilders;
|
||||
import org.elasticsearch.index.search.MatchQuery;
|
||||
import org.elasticsearch.search.SearchHit;
|
||||
import org.elasticsearch.search.SearchHits;
|
||||
import org.elasticsearch.search.sort.SortBuilders;
|
||||
|
@ -152,7 +153,7 @@ public class MultiMatchQueryIT extends ESIntegTestCase {
|
|||
|
||||
@Test
|
||||
public void testDefaults() throws ExecutionException, InterruptedException {
|
||||
MatchQueryBuilder.Type type = randomBoolean() ? null : MatchQueryBuilder.Type.BOOLEAN;
|
||||
MatchQuery.Type type = randomBoolean() ? null : MatchQuery.Type.BOOLEAN;
|
||||
SearchResponse searchResponse = client().prepareSearch("test")
|
||||
.setQuery(randomizeType(multiMatchQuery("marvel hero captain america", "full_name", "first_name", "last_name", "category")
|
||||
.operator(Operator.OR))).get();
|
||||
|
@ -194,18 +195,18 @@ public class MultiMatchQueryIT extends ESIntegTestCase {
|
|||
public void testPhraseType() {
|
||||
SearchResponse searchResponse = client().prepareSearch("test")
|
||||
.setQuery(randomizeType(multiMatchQuery("Man the Ultimate", "full_name_phrase", "first_name_phrase", "last_name_phrase", "category_phrase")
|
||||
.operator(Operator.OR).type(MatchQueryBuilder.Type.PHRASE))).get();
|
||||
.operator(Operator.OR).type(MatchQuery.Type.PHRASE))).get();
|
||||
assertFirstHit(searchResponse, hasId("ultimate2"));
|
||||
assertHitCount(searchResponse, 1l);
|
||||
|
||||
searchResponse = client().prepareSearch("test")
|
||||
.setQuery(randomizeType(multiMatchQuery("Captain", "full_name_phrase", "first_name_phrase", "last_name_phrase", "category_phrase")
|
||||
.operator(Operator.OR).type(MatchQueryBuilder.Type.PHRASE))).get();
|
||||
.operator(Operator.OR).type(MatchQuery.Type.PHRASE))).get();
|
||||
assertThat(searchResponse.getHits().getTotalHits(), greaterThan(1l));
|
||||
|
||||
searchResponse = client().prepareSearch("test")
|
||||
.setQuery(randomizeType(multiMatchQuery("the Ul", "full_name_phrase", "first_name_phrase", "last_name_phrase", "category_phrase")
|
||||
.operator(Operator.OR).type(MatchQueryBuilder.Type.PHRASE_PREFIX))).get();
|
||||
.operator(Operator.OR).type(MatchQuery.Type.PHRASE_PREFIX))).get();
|
||||
assertSearchHits(searchResponse, "ultimate2", "ultimate1");
|
||||
assertHitCount(searchResponse, 2l);
|
||||
}
|
||||
|
@ -239,7 +240,7 @@ public class MultiMatchQueryIT extends ESIntegTestCase {
|
|||
.setQuery(multiMatchQueryBuilder).get();
|
||||
MatchQueryBuilder matchQueryBuilder = QueryBuilders.matchQuery(field, builder.toString());
|
||||
if (getType(multiMatchQueryBuilder) != null) {
|
||||
matchQueryBuilder.type(MatchQueryBuilder.Type.valueOf(getType(multiMatchQueryBuilder).matchQueryType().toString()));
|
||||
matchQueryBuilder.type(MatchQuery.Type.valueOf(getType(multiMatchQueryBuilder).matchQueryType().toString()));
|
||||
}
|
||||
SearchResponse matchResp = client().prepareSearch("test")
|
||||
// _uid tie sort
|
||||
|
@ -260,7 +261,7 @@ public class MultiMatchQueryIT extends ESIntegTestCase {
|
|||
public void testCutoffFreq() throws ExecutionException, InterruptedException {
|
||||
final long numDocs = client().prepareCount("test")
|
||||
.setQuery(matchAllQuery()).get().getCount();
|
||||
MatchQueryBuilder.Type type = randomBoolean() ? null : MatchQueryBuilder.Type.BOOLEAN;
|
||||
MatchQuery.Type type = randomBoolean() ? null : MatchQuery.Type.BOOLEAN;
|
||||
Float cutoffFrequency = randomBoolean() ? Math.min(1, numDocs * 1.f / between(10, 20)) : 1.f / between(10, 20);
|
||||
SearchResponse searchResponse = client().prepareSearch("test")
|
||||
.setQuery(randomizeType(multiMatchQuery("marvel hero captain america", "full_name", "first_name", "last_name", "category")
|
||||
|
@ -325,7 +326,7 @@ public class MultiMatchQueryIT extends ESIntegTestCase {
|
|||
int numIters = scaledRandomIntBetween(5, 10);
|
||||
for (int i = 0; i < numIters; i++) {
|
||||
{
|
||||
MatchQueryBuilder.Type type = randomBoolean() ? null : MatchQueryBuilder.Type.BOOLEAN;
|
||||
MatchQuery.Type type = randomBoolean() ? null : MatchQuery.Type.BOOLEAN;
|
||||
MultiMatchQueryBuilder multiMatchQueryBuilder = randomBoolean() ? multiMatchQuery("marvel hero captain america", "full_name", "first_name", "last_name", "category") :
|
||||
multiMatchQuery("marvel hero captain america", "*_name", randomBoolean() ? "category" : "categ*");
|
||||
SearchResponse left = client().prepareSearch("test").setSize(numDocs)
|
||||
|
@ -345,7 +346,7 @@ public class MultiMatchQueryIT extends ESIntegTestCase {
|
|||
}
|
||||
|
||||
{
|
||||
MatchQueryBuilder.Type type = randomBoolean() ? null : MatchQueryBuilder.Type.BOOLEAN;
|
||||
MatchQuery.Type type = randomBoolean() ? null : MatchQuery.Type.BOOLEAN;
|
||||
String minShouldMatch = randomBoolean() ? null : "" + between(0, 1);
|
||||
Operator op = randomBoolean() ? Operator.AND : Operator.OR;
|
||||
MultiMatchQueryBuilder multiMatchQueryBuilder = randomBoolean() ? multiMatchQuery("captain america", "full_name", "first_name", "last_name", "category") :
|
||||
|
@ -372,7 +373,7 @@ public class MultiMatchQueryIT extends ESIntegTestCase {
|
|||
SearchResponse left = client().prepareSearch("test").setSize(numDocs)
|
||||
.addSort(SortBuilders.scoreSort()).addSort(SortBuilders.fieldSort("_uid"))
|
||||
.setQuery(randomizeType(multiMatchQuery("capta", "full_name", "first_name", "last_name", "category")
|
||||
.type(MatchQueryBuilder.Type.PHRASE_PREFIX).useDisMax(false).minimumShouldMatch(minShouldMatch))).get();
|
||||
.type(MatchQuery.Type.PHRASE_PREFIX).useDisMax(false).minimumShouldMatch(minShouldMatch))).get();
|
||||
|
||||
SearchResponse right = client().prepareSearch("test").setSize(numDocs)
|
||||
.addSort(SortBuilders.scoreSort()).addSort(SortBuilders.fieldSort("_uid"))
|
||||
|
@ -392,12 +393,12 @@ public class MultiMatchQueryIT extends ESIntegTestCase {
|
|||
left = client().prepareSearch("test").setSize(numDocs)
|
||||
.addSort(SortBuilders.scoreSort()).addSort(SortBuilders.fieldSort("_uid"))
|
||||
.setQuery(randomizeType(multiMatchQuery("captain america", "full_name", "first_name", "last_name", "category")
|
||||
.type(MatchQueryBuilder.Type.PHRASE).useDisMax(false).minimumShouldMatch(minShouldMatch))).get();
|
||||
.type(MatchQuery.Type.PHRASE).useDisMax(false).minimumShouldMatch(minShouldMatch))).get();
|
||||
} else {
|
||||
left = client().prepareSearch("test").setSize(numDocs)
|
||||
.addSort(SortBuilders.scoreSort()).addSort(SortBuilders.fieldSort("_uid"))
|
||||
.setQuery(randomizeType(multiMatchQuery("captain america", "full_name", "first_name", "last_name", "category")
|
||||
.type(MatchQueryBuilder.Type.PHRASE).tieBreaker(1.0f).minimumShouldMatch(minShouldMatch))).get();
|
||||
.type(MatchQuery.Type.PHRASE).tieBreaker(1.0f).minimumShouldMatch(minShouldMatch))).get();
|
||||
}
|
||||
SearchResponse right = client().prepareSearch("test").setSize(numDocs)
|
||||
.addSort(SortBuilders.scoreSort()).addSort(SortBuilders.fieldSort("_uid"))
|
||||
|
@ -582,24 +583,24 @@ public class MultiMatchQueryIT extends ESIntegTestCase {
|
|||
switch (type) {
|
||||
case BEST_FIELDS:
|
||||
if (randomBoolean()) {
|
||||
oType = MatchQueryBuilder.Type.BOOLEAN;
|
||||
oType = MatchQuery.Type.BOOLEAN;
|
||||
}
|
||||
break;
|
||||
case MOST_FIELDS:
|
||||
if (randomBoolean()) {
|
||||
oType = MatchQueryBuilder.Type.BOOLEAN;
|
||||
oType = MatchQuery.Type.BOOLEAN;
|
||||
}
|
||||
break;
|
||||
case CROSS_FIELDS:
|
||||
break;
|
||||
case PHRASE:
|
||||
if (randomBoolean()) {
|
||||
oType = MatchQueryBuilder.Type.PHRASE;
|
||||
oType = MatchQuery.Type.PHRASE;
|
||||
}
|
||||
break;
|
||||
case PHRASE_PREFIX:
|
||||
if (randomBoolean()) {
|
||||
oType = MatchQueryBuilder.Type.PHRASE_PREFIX;
|
||||
oType = MatchQuery.Type.PHRASE_PREFIX;
|
||||
}
|
||||
break;
|
||||
}
|
||||
@ -32,7 +32,8 @@ import org.elasticsearch.common.settings.Settings;
|
|||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.index.mapper.MapperParsingException;
|
||||
import org.elasticsearch.index.query.*;
|
||||
import org.elasticsearch.index.query.MatchQueryBuilder.Type;
|
||||
import org.elasticsearch.index.search.MatchQuery.Type;
|
||||
import org.elasticsearch.index.search.MatchQuery;
|
||||
import org.elasticsearch.rest.RestStatus;
|
||||
import org.elasticsearch.script.Script;
|
||||
import org.elasticsearch.search.SearchHit;
|
||||
|
@ -163,7 +164,7 @@ public class SearchQueryIT extends ESIntegTestCase {
|
|||
client().prepareIndex("test", "type1", "1").setSource("field1", "quick brown fox", "field2", "quick brown fox"),
|
||||
client().prepareIndex("test", "type1", "2").setSource("field1", "quick lazy huge brown fox", "field2", "quick lazy huge brown fox"));
|
||||
|
||||
SearchResponse searchResponse = client().prepareSearch().setQuery(matchQuery("field2", "quick brown").type(MatchQueryBuilder.Type.PHRASE).slop(0)).get();
|
||||
SearchResponse searchResponse = client().prepareSearch().setQuery(matchQuery("field2", "quick brown").type(Type.PHRASE).slop(0)).get();
|
||||
assertHitCount(searchResponse, 1l);
|
||||
|
||||
assertFailures(client().prepareSearch().setQuery(matchQuery("field1", "quick brown").type(Type.PHRASE).slop(0)),
|
||||
|
@ -467,10 +468,10 @@ public class SearchQueryIT extends ESIntegTestCase {
|
|||
client().prepareIndex("test", "type1", "2").setSource("field1", "quick lazy huge brown fox", "field2", "quick lazy huge brown fox"));
|
||||
|
||||
|
||||
SearchResponse searchResponse = client().prepareSearch().setQuery(matchQuery("field2", "quick brown").type(MatchQueryBuilder.Type.PHRASE).slop(0)).get();
|
||||
SearchResponse searchResponse = client().prepareSearch().setQuery(matchQuery("field2", "quick brown").type(Type.PHRASE).slop(0)).get();
|
||||
assertHitCount(searchResponse, 1l);
|
||||
try {
|
||||
client().prepareSearch().setQuery(matchQuery("field1", "quick brown").type(MatchQueryBuilder.Type.PHRASE).slop(0)).get();
|
||||
client().prepareSearch().setQuery(matchQuery("field1", "quick brown").type(Type.PHRASE).slop(0)).get();
|
||||
fail("SearchPhaseExecutionException should have been thrown");
|
||||
} catch (SearchPhaseExecutionException e) {
|
||||
assertTrue(e.toString().contains("IllegalStateException[field \"field1\" was indexed without position data; cannot run PhraseQuery"));
|
||||
|
@ -960,18 +961,18 @@ public class SearchQueryIT extends ESIntegTestCase {
|
|||
refresh();
|
||||
|
||||
BoolQueryBuilder boolQuery = boolQuery()
|
||||
.must(matchQuery("field1", "a").zeroTermsQuery(MatchQueryBuilder.ZeroTermsQuery.NONE))
|
||||
.must(matchQuery("field1", "value1").zeroTermsQuery(MatchQueryBuilder.ZeroTermsQuery.NONE));
|
||||
.must(matchQuery("field1", "a").zeroTermsQuery(MatchQuery.ZeroTermsQuery.NONE))
|
||||
.must(matchQuery("field1", "value1").zeroTermsQuery(MatchQuery.ZeroTermsQuery.NONE));
|
||||
SearchResponse searchResponse = client().prepareSearch().setQuery(boolQuery).get();
|
||||
assertHitCount(searchResponse, 0l);
|
||||
|
||||
boolQuery = boolQuery()
|
||||
.must(matchQuery("field1", "a").zeroTermsQuery(MatchQueryBuilder.ZeroTermsQuery.ALL))
|
||||
.must(matchQuery("field1", "value1").zeroTermsQuery(MatchQueryBuilder.ZeroTermsQuery.ALL));
|
||||
.must(matchQuery("field1", "a").zeroTermsQuery(MatchQuery.ZeroTermsQuery.ALL))
|
||||
.must(matchQuery("field1", "value1").zeroTermsQuery(MatchQuery.ZeroTermsQuery.ALL));
|
||||
searchResponse = client().prepareSearch().setQuery(boolQuery).get();
|
||||
assertHitCount(searchResponse, 1l);
|
||||
|
||||
boolQuery = boolQuery().must(matchQuery("field1", "a").zeroTermsQuery(MatchQueryBuilder.ZeroTermsQuery.ALL));
|
||||
boolQuery = boolQuery().must(matchQuery("field1", "a").zeroTermsQuery(MatchQuery.ZeroTermsQuery.ALL));
|
||||
searchResponse = client().prepareSearch().setQuery(boolQuery).get();
|
||||
assertHitCount(searchResponse, 2l);
|
||||
}
|
||||
|
@ -985,18 +986,18 @@ public class SearchQueryIT extends ESIntegTestCase {
|
|||
|
||||
|
||||
BoolQueryBuilder boolQuery = boolQuery()
|
||||
.must(multiMatchQuery("a", "field1", "field2").zeroTermsQuery(MatchQueryBuilder.ZeroTermsQuery.NONE))
|
||||
.must(multiMatchQuery("value1", "field1", "field2").zeroTermsQuery(MatchQueryBuilder.ZeroTermsQuery.NONE)); // Fields are ORed together
|
||||
.must(multiMatchQuery("a", "field1", "field2").zeroTermsQuery(MatchQuery.ZeroTermsQuery.NONE))
|
||||
.must(multiMatchQuery("value1", "field1", "field2").zeroTermsQuery(MatchQuery.ZeroTermsQuery.NONE)); // Fields are ORed together
|
||||
SearchResponse searchResponse = client().prepareSearch().setQuery(boolQuery).get();
|
||||
assertHitCount(searchResponse, 0l);
|
||||
|
||||
boolQuery = boolQuery()
|
||||
.must(multiMatchQuery("a", "field1", "field2").zeroTermsQuery(MatchQueryBuilder.ZeroTermsQuery.ALL))
|
||||
.must(multiMatchQuery("value4", "field1", "field2").zeroTermsQuery(MatchQueryBuilder.ZeroTermsQuery.ALL));
|
||||
.must(multiMatchQuery("a", "field1", "field2").zeroTermsQuery(MatchQuery.ZeroTermsQuery.ALL))
|
||||
.must(multiMatchQuery("value4", "field1", "field2").zeroTermsQuery(MatchQuery.ZeroTermsQuery.ALL));
|
||||
searchResponse = client().prepareSearch().setQuery(boolQuery).get();
|
||||
assertHitCount(searchResponse, 1l);
|
||||
|
||||
boolQuery = boolQuery().must(multiMatchQuery("a", "field1").zeroTermsQuery(MatchQueryBuilder.ZeroTermsQuery.ALL));
|
||||
boolQuery = boolQuery().must(multiMatchQuery("a", "field1").zeroTermsQuery(MatchQuery.ZeroTermsQuery.ALL));
|
||||
searchResponse = client().prepareSearch().setQuery(boolQuery).get();
|
||||
assertHitCount(searchResponse, 2l);
|
||||
}
|
||||
@ -84,3 +84,7 @@ InnerHitsBuilder now has a dedicated addParentChildInnerHits and addNestedInnerH
to differentiate between inner hits for nested vs. parent / child documents. This change
makes the type / path parameter mandatory.

==== MatchQueryBuilder

Moving the MatchQueryBuilder.Type and MatchQueryBuilder.ZeroTermsQuery enums to MatchQuery
(now MatchQuery.Type and MatchQuery.ZeroTermsQuery). Also reusing the new Operator enum.
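A before/after sketch of the enum move for callers; it mirrors the QueryBuilders and test changes in this commit, and the "tags"/"long tag" values come from those tests rather than from the migration note itself.

    // Before: the enums were nested in the builder
    //   matchQuery("tags", "long tag").type(MatchQueryBuilder.Type.PHRASE);
    //   matchQuery("field1", "a").zeroTermsQuery(MatchQueryBuilder.ZeroTermsQuery.ALL);

    // After: both enums live on org.elasticsearch.index.search.MatchQuery,
    // and Operator is the shared org.elasticsearch.index.query.Operator enum
    QueryBuilders.matchQuery("tags", "long tag")
            .type(MatchQuery.Type.PHRASE)
            .operator(Operator.OR)
            .zeroTermsQuery(MatchQuery.ZeroTermsQuery.ALL);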