Refactors MultiMatchQueryBuilder and Parser
Relates to #10217. This PR is against the query-refactoring branch. Closes #13405.
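For reviewers, a minimal usage sketch of the refactored flow (field names and values are illustrative, not part of the commit): the parser now returns a MultiMatchQueryBuilder from fromXContent, and the Lucene query is only built later via doToQuery, using the builder API introduced in the diff below.

    MultiMatchQueryBuilder builder = new MultiMatchQueryBuilder("quick brown fox", "first_name", "last_name")
            .field("category", 2.0f)   // boosts are now passed explicitly, not as "category^2"
            .type(MultiMatchQueryBuilder.Type.BEST_FIELDS)
            .operator(Operator.AND)
            .zeroTermsQuery(MatchQuery.ZeroTermsQuery.NONE);
    // builder.toQuery(queryShardContext) then produces the Lucene Query the parser used to build directly.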
This commit is contained in:
parent c5a7fedb23
commit a3c68f690b
@@ -19,20 +19,28 @@
package org.elasticsearch.index.query;

import com.carrotsearch.hppc.ObjectFloatHashMap;

import org.apache.lucene.search.FuzzyQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.support.QueryParsers;
import org.elasticsearch.index.search.MatchQuery;
import org.elasticsearch.index.search.MultiMatchQuery;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.TreeMap;

/**
 * Same as {@link MatchQueryBuilder} but supports multiple fields.

@@ -41,42 +49,34 @@ public class MultiMatchQueryBuilder extends AbstractQueryBuilder<MultiMatchQuery

    public static final String NAME = "multi_match";

    private final Object text;

    private final List<String> fields;
    private ObjectFloatHashMap<String> fieldsBoosts;

    private MultiMatchQueryBuilder.Type type;

    private Operator operator;
    public static final MultiMatchQueryBuilder.Type DEFAULT_TYPE = MultiMatchQueryBuilder.Type.BEST_FIELDS;
    public static final Operator DEFAULT_OPERATOR = Operator.OR;
    public static final int DEFAULT_PHRASE_SLOP = MatchQuery.DEFAULT_PHRASE_SLOP;
    public static final int DEFAULT_PREFIX_LENGTH = FuzzyQuery.defaultPrefixLength;
    public static final int DEFAULT_MAX_EXPANSIONS = FuzzyQuery.defaultMaxExpansions;
    public static final boolean DEFAULT_LENIENCY = MatchQuery.DEFAULT_LENIENCY;
    public static final MatchQuery.ZeroTermsQuery DEFAULT_ZERO_TERMS_QUERY = MatchQuery.DEFAULT_ZERO_TERMS_QUERY;

    private final Object value;
    private Map<String, Float> fieldsBoosts = new TreeMap<>();
    private MultiMatchQueryBuilder.Type type = DEFAULT_TYPE;
    private Operator operator = DEFAULT_OPERATOR;
    private String analyzer;

    private Integer slop;

    private int slop = DEFAULT_PHRASE_SLOP;
    private Fuzziness fuzziness;

    private Integer prefixLength;

    private Integer maxExpansions;

    private int prefixLength = DEFAULT_PREFIX_LENGTH;
    private int maxExpansions = DEFAULT_MAX_EXPANSIONS;
    private String minimumShouldMatch;

    private String fuzzyRewrite = null;

    private Boolean useDisMax;

    private Float tieBreaker;

    private Boolean lenient;

    private boolean lenient = DEFAULT_LENIENCY;
    private Float cutoffFrequency = null;
    private MatchQuery.ZeroTermsQuery zeroTermsQuery = DEFAULT_ZERO_TERMS_QUERY;

    private MatchQuery.ZeroTermsQuery zeroTermsQuery = null;
    static final MultiMatchQueryBuilder PROTOTYPE = new MultiMatchQueryBuilder("");

    static final MultiMatchQueryBuilder PROTOTYPE = new MultiMatchQueryBuilder(null);

    public enum Type {
    public enum Type implements Writeable<Type> {

        /**
         * Uses the best matching boolean field as main score and uses

@@ -109,6 +109,8 @@ public class MultiMatchQueryBuilder extends AbstractQueryBuilder<MultiMatchQuery
         */
        PHRASE_PREFIX(MatchQuery.Type.PHRASE_PREFIX, 0.0f, new ParseField("phrase_prefix"));

        private static final Type PROTOTYPE = BEST_FIELDS;

        private MatchQuery.Type matchQueryType;
        private final float tieBreaker;
        private final ParseField parseField;

@@ -145,6 +147,20 @@ public class MultiMatchQueryBuilder extends AbstractQueryBuilder<MultiMatchQuery
            }
            return type;
        }

        @Override
        public Type readFrom(StreamInput in) throws IOException {
            return Type.values()[in.readVInt()];
        }

        public static Type readTypeFrom(StreamInput in) throws IOException {
            return PROTOTYPE.readFrom(in);
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            out.writeVInt(this.ordinal());
        }
    }

    /**

@@ -157,17 +173,28 @@ public class MultiMatchQueryBuilder extends AbstractQueryBuilder<MultiMatchQuery
    /**
     * Constructs a new text query.
     */
    public MultiMatchQueryBuilder(Object text, String... fields) {
        this.fields = new ArrayList<>();
        this.fields.addAll(Arrays.asList(fields));
        this.text = text;
    public MultiMatchQueryBuilder(Object value, String... fields) {
        if (value == null) {
            throw new IllegalArgumentException("[" + NAME + "] requires query value");
        }
        this.value = value;
        for (String field : fields) {
            field(field);
        }
    }

    public Object value() {
        return value;
    }

    /**
     * Adds a field to run the multi match against.
     */
    public MultiMatchQueryBuilder field(String field) {
        fields.add(field);
        if (Strings.isEmpty(field)) {
            throw new IllegalArgumentException("supplied field is null or empty.");
        }
        this.fieldsBoosts.put(field, AbstractQueryBuilder.DEFAULT_BOOST);
        return this;
    }

@@ -175,18 +202,32 @@ public class MultiMatchQueryBuilder extends AbstractQueryBuilder<MultiMatchQuery
     * Adds a field to run the multi match against with a specific boost.
     */
    public MultiMatchQueryBuilder field(String field, float boost) {
        fields.add(field);
        if (fieldsBoosts == null) {
            fieldsBoosts = new ObjectFloatHashMap<>();
        if (Strings.isEmpty(field)) {
            throw new IllegalArgumentException("supplied field is null or empty.");
        }
        fieldsBoosts.put(field, boost);
        this.fieldsBoosts.put(field, boost);
        return this;
    }

    /**
     * Add several fields to run the query against with a specific boost.
     */
    public MultiMatchQueryBuilder fields(Map<String, Float> fields) {
        this.fieldsBoosts.putAll(fields);
        return this;
    }

    public Map<String, Float> fields() {
        return fieldsBoosts;
    }

    /**
     * Sets the type of the text query.
     */
    public MultiMatchQueryBuilder type(MultiMatchQueryBuilder.Type type) {
        if (type == null) {
            throw new IllegalArgumentException("[" + NAME + "] requires type to be non-null");
        }
        this.type = type;
        return this;
    }

@@ -195,18 +236,32 @@ public class MultiMatchQueryBuilder extends AbstractQueryBuilder<MultiMatchQuery
     * Sets the type of the text query.
     */
    public MultiMatchQueryBuilder type(Object type) {
        this.type = type == null ? null : Type.parse(type.toString().toLowerCase(Locale.ROOT), ParseFieldMatcher.EMPTY);
        if (type == null) {
            throw new IllegalArgumentException("[" + NAME + "] requires type to be non-null");
        }
        this.type = Type.parse(type.toString().toLowerCase(Locale.ROOT), ParseFieldMatcher.EMPTY);
        return this;
    }

    public Type type() {
        return type;
    }

    /**
     * Sets the operator to use when using a boolean query. Defaults to <tt>OR</tt>.
     */
    public MultiMatchQueryBuilder operator(Operator operator) {
        if (operator == null) {
            throw new IllegalArgumentException("[" + NAME + "] requires operator to be non-null");
        }
        this.operator = operator;
        return this;
    }

    public Operator operator() {
        return operator;
    }

    /**
     * Explicitly set the analyzer to use. Defaults to use explicit mapping config for the field, or, if not
     * set, the default search analyzer.

@@ -216,56 +271,99 @@ public class MultiMatchQueryBuilder extends AbstractQueryBuilder<MultiMatchQuery
        return this;
    }

    public String analyzer() {
        return analyzer;
    }

    /**
     * Set the phrase slop if evaluated to a phrase query type.
     */
    public MultiMatchQueryBuilder slop(int slop) {
        if (slop < 0) {
            throw new IllegalArgumentException("No negative slop allowed.");
        }
        this.slop = slop;
        return this;
    }

    public int slop() {
        return slop;
    }

    /**
     * Sets the fuzziness used when evaluated to a fuzzy query type. Defaults to "AUTO".
     */
    public MultiMatchQueryBuilder fuzziness(Object fuzziness) {
        this.fuzziness = Fuzziness.build(fuzziness);
        if (fuzziness != null) {
            this.fuzziness = Fuzziness.build(fuzziness);
        }
        return this;
    }

    public Fuzziness fuzziness() {
        return fuzziness;
    }

    public MultiMatchQueryBuilder prefixLength(int prefixLength) {
        if (prefixLength < 0) {
            throw new IllegalArgumentException("No negative prefix length allowed.");
        }
        this.prefixLength = prefixLength;
        return this;
    }

    public int prefixLength() {
        return prefixLength;
    }

    /**
     * When using fuzzy or prefix type query, the number of term expansions to use. Defaults to unbounded
     * so its recommended to set it to a reasonable value for faster execution.
     */
    public MultiMatchQueryBuilder maxExpansions(int maxExpansions) {
        if (maxExpansions <= 0) {
            throw new IllegalArgumentException("Max expansions must be strictly great than zero.");
        }
        this.maxExpansions = maxExpansions;
        return this;
    }

    public int maxExpansions() {
        return maxExpansions;
    }

    public MultiMatchQueryBuilder minimumShouldMatch(String minimumShouldMatch) {
        this.minimumShouldMatch = minimumShouldMatch;
        return this;
    }

    public String minimumShouldMatch() {
        return minimumShouldMatch;
    }

    public MultiMatchQueryBuilder fuzzyRewrite(String fuzzyRewrite) {
        this.fuzzyRewrite = fuzzyRewrite;
        return this;
    }

    public String fuzzyRewrite() {
        return fuzzyRewrite;
    }

    /**
     * @deprecated use a tieBreaker of 1.0f to disable "dis-max"
     * query or select the appropriate {@link Type}
     */
    @Deprecated
    public MultiMatchQueryBuilder useDisMax(boolean useDisMax) {
    public MultiMatchQueryBuilder useDisMax(Boolean useDisMax) {
        this.useDisMax = useDisMax;
        return this;
    }

    public Boolean useDisMax() {
        return useDisMax;
    }

    /**
     * <p>Tie-Breaker for "best-match" disjunction queries (OR-Queries).
     * The tie breaker capability allows documents that match more than one query clause

@@ -283,6 +381,27 @@ public class MultiMatchQueryBuilder extends AbstractQueryBuilder<MultiMatchQuery
        return this;
    }

    /**
     * <p>Tie-Breaker for "best-match" disjunction queries (OR-Queries).
     * The tie breaker capability allows documents that match more than one query clause
     * (in this case on more than one field) to be scored better than documents that
     * match only the best of the fields, without confusing this with the better case of
     * two distinct matches in the multiple fields.</p>
     *
     * <p>A tie-breaker value of <tt>1.0</tt> is interpreted as a signal to score queries as
     * "most-match" queries where all matching query clauses are considered for scoring.</p>
     *
     * @see Type
     */
    public MultiMatchQueryBuilder tieBreaker(Float tieBreaker) {
        this.tieBreaker = tieBreaker;
        return this;
    }

    public Float tieBreaker() {
        return tieBreaker;
    }

    /**
     * Sets whether format based failures will be ignored.
     */

@@ -291,6 +410,9 @@ public class MultiMatchQueryBuilder extends AbstractQueryBuilder<MultiMatchQuery
        return this;
    }

    public boolean lenient() {
        return lenient;
    }

    /**
     * Set a cutoff value in [0..1] (or absolute number >=1) representing the

@@ -302,77 +424,70 @@ public class MultiMatchQueryBuilder extends AbstractQueryBuilder<MultiMatchQuery
        return this;
    }

    /**
     * Set a cutoff value in [0..1] (or absolute number >=1) representing the
     * maximum threshold of a terms document frequency to be considered a low
     * frequency term.
     */
    public MultiMatchQueryBuilder cutoffFrequency(Float cutoff) {
        this.cutoffFrequency = cutoff;
        return this;
    }

    public Float cutoffFrequency() {
        return cutoffFrequency;
    }

    public MultiMatchQueryBuilder zeroTermsQuery(MatchQuery.ZeroTermsQuery zeroTermsQuery) {
        if (zeroTermsQuery == null) {
            throw new IllegalArgumentException("[" + NAME + "] requires zero terms query to be non-null");
        }
        this.zeroTermsQuery = zeroTermsQuery;
        return this;
    }

    public MatchQuery.ZeroTermsQuery zeroTermsQuery() {
        return zeroTermsQuery;
    }

    @Override
    public void doXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject(NAME);

        builder.field("query", text);
        builder.field("query", value);
        builder.startArray("fields");
        for (String field : fields) {
            final int keySlot;
            if (fieldsBoosts != null && ((keySlot = fieldsBoosts.indexOf(field)) >= 0)) {
                field += "^" + fieldsBoosts.indexGet(keySlot);
            }
            builder.value(field);
        for (Map.Entry<String, Float> fieldEntry : this.fieldsBoosts.entrySet()) {
            builder.value(fieldEntry.getKey() + "^" + fieldEntry.getValue());
        }
        builder.endArray();

        if (type != null) {
            builder.field("type", type.toString().toLowerCase(Locale.ENGLISH));
        }
        if (operator != null) {
            builder.field("operator", operator.toString());
        }
        builder.field("type", type.toString().toLowerCase(Locale.ENGLISH));
        builder.field("operator", operator.toString());
        if (analyzer != null) {
            builder.field("analyzer", analyzer);
        }
        if (slop != null) {
            builder.field("slop", slop);
        }
        builder.field("slop", slop);
        if (fuzziness != null) {
            fuzziness.toXContent(builder, params);
        }
        if (prefixLength != null) {
            builder.field("prefix_length", prefixLength);
        }
        if (maxExpansions != null) {
            builder.field("max_expansions", maxExpansions);
        }
        builder.field("prefix_length", prefixLength);
        builder.field("max_expansions", maxExpansions);
        if (minimumShouldMatch != null) {
            builder.field("minimum_should_match", minimumShouldMatch);
        }
        if (fuzzyRewrite != null) {
            builder.field("fuzzy_rewrite", fuzzyRewrite);
        }

        if (useDisMax != null) {
            builder.field("use_dis_max", useDisMax);
        }

        if (tieBreaker != null) {
            builder.field("tie_breaker", tieBreaker);
        }

        if (lenient != null) {
            builder.field("lenient", lenient);
        }

        builder.field("lenient", lenient);
        if (cutoffFrequency != null) {
            builder.field("cutoff_frequency", cutoffFrequency);
        }

        if (zeroTermsQuery != null) {
            builder.field("zero_terms_query", zeroTermsQuery.toString());
        }

        builder.field("zero_terms_query", zeroTermsQuery.toString());
        printBoostAndQueryName(builder);

        builder.endObject();
    }

@@ -380,4 +495,165 @@ public class MultiMatchQueryBuilder extends AbstractQueryBuilder<MultiMatchQuery
    public String getWriteableName() {
        return NAME;
    }

    @Override
    protected Query doToQuery(QueryShardContext context) throws IOException {
        MultiMatchQuery multiMatchQuery = new MultiMatchQuery(context);
        if (analyzer != null) {
            if (context.analysisService().analyzer(analyzer) == null) {
                throw new QueryShardException(context, "[" + NAME + "] analyzer [" + analyzer + "] not found");
            }
            multiMatchQuery.setAnalyzer(analyzer);
        }
        multiMatchQuery.setPhraseSlop(slop);
        if (fuzziness != null) {
            multiMatchQuery.setFuzziness(fuzziness);
        }
        multiMatchQuery.setFuzzyPrefixLength(prefixLength);
        multiMatchQuery.setMaxExpansions(maxExpansions);
        multiMatchQuery.setOccur(operator.toBooleanClauseOccur());
        if (fuzzyRewrite != null) {
            multiMatchQuery.setFuzzyRewriteMethod(QueryParsers.parseRewriteMethod(context.parseFieldMatcher(), fuzzyRewrite, null));
        }
        if (tieBreaker != null) {
            multiMatchQuery.setTieBreaker(tieBreaker);
        }
        if (cutoffFrequency != null) {
            multiMatchQuery.setCommonTermsCutoff(cutoffFrequency);
        }
        multiMatchQuery.setLenient(lenient);
        multiMatchQuery.setZeroTermsQuery(zeroTermsQuery);

        if (useDisMax != null) { // backwards foobar
            boolean typeUsesDismax = type.tieBreaker() != 1.0f;
            if (typeUsesDismax != useDisMax) {
                if (useDisMax && tieBreaker == null) {
                    multiMatchQuery.setTieBreaker(0.0f);
                } else {
                    multiMatchQuery.setTieBreaker(1.0f);
                }
            }
        }

        Map<String, Float> newFieldsBoosts = handleFieldsMatchPattern(context.mapperService(), fieldsBoosts);

        Query query = multiMatchQuery.parse(type, newFieldsBoosts, value, minimumShouldMatch);
        if (query == null) {
            return null;
        }
        return query;
    }

    @Override
    protected void setFinalBoost(Query query) {
        // we need to preserve the boost that came out of the parsing phase
        query.setBoost(boost * query.getBoost());
    }

    private static Map<String, Float> handleFieldsMatchPattern(MapperService mapperService, Map<String, Float> fieldsBoosts) {
        Map<String, Float> newFieldsBoosts = new TreeMap<>();
        for (Map.Entry<String, Float> fieldBoost : fieldsBoosts.entrySet()) {
            String fField = fieldBoost.getKey();
            Float fBoost = fieldBoost.getValue();
            if (Regex.isSimpleMatchPattern(fField)) {
                for (String field : mapperService.simpleMatchToIndexNames(fField)) {
                    newFieldsBoosts.put(field, fBoost);
                }
            } else {
                newFieldsBoosts.put(fField, fBoost);
            }
        }
        return newFieldsBoosts;
    }

    @Override
    public QueryValidationException validate() {
        QueryValidationException validationException = null;
        if (fieldsBoosts.isEmpty()) {
            validationException = addValidationError("no fields specified for multi_match query.", validationException);
        }
        return validationException;
    }

    @Override
    protected MultiMatchQueryBuilder doReadFrom(StreamInput in) throws IOException {
        MultiMatchQueryBuilder multiMatchQuery = new MultiMatchQueryBuilder(in.readGenericValue());
        int size = in.readVInt();
        for (int i = 0; i < size; i++) {
            multiMatchQuery.fieldsBoosts.put(in.readString(), in.readFloat());
        }
        multiMatchQuery.type = MultiMatchQueryBuilder.Type.readTypeFrom(in);
        multiMatchQuery.operator = Operator.readOperatorFrom(in);
        multiMatchQuery.analyzer = in.readOptionalString();
        multiMatchQuery.slop = in.readVInt();
        if (in.readBoolean()) {
            multiMatchQuery.fuzziness = Fuzziness.readFuzzinessFrom(in);
        }
        multiMatchQuery.prefixLength = in.readVInt();
        multiMatchQuery.maxExpansions = in.readVInt();
        multiMatchQuery.minimumShouldMatch = in.readOptionalString();
        multiMatchQuery.fuzzyRewrite = in.readOptionalString();
        multiMatchQuery.useDisMax = in.readOptionalBoolean();
        multiMatchQuery.tieBreaker = (Float) in.readGenericValue();
        multiMatchQuery.lenient = in.readBoolean();
        multiMatchQuery.cutoffFrequency = (Float) in.readGenericValue();
        multiMatchQuery.zeroTermsQuery = MatchQuery.ZeroTermsQuery.readZeroTermsQueryFrom(in);
        return multiMatchQuery;
    }

    @Override
    protected void doWriteTo(StreamOutput out) throws IOException {
        out.writeGenericValue(value);
        out.writeVInt(fieldsBoosts.size());
        for (Map.Entry<String, Float> fieldsEntry : fieldsBoosts.entrySet()) {
            out.writeString(fieldsEntry.getKey());
            out.writeFloat(fieldsEntry.getValue());
        }
        type.writeTo(out);
        operator.writeTo(out);
        out.writeOptionalString(analyzer);
        out.writeVInt(slop);
        if (fuzziness != null) {
            out.writeBoolean(true);
            fuzziness.writeTo(out);
        } else {
            out.writeBoolean(false);
        }
        out.writeVInt(prefixLength);
        out.writeVInt(maxExpansions);
        out.writeOptionalString(minimumShouldMatch);
        out.writeOptionalString(fuzzyRewrite);
        out.writeOptionalBoolean(useDisMax);
        out.writeGenericValue(tieBreaker);
        out.writeBoolean(lenient);
        out.writeGenericValue(cutoffFrequency);
        zeroTermsQuery.writeTo(out);
    }

    @Override
    protected int doHashCode() {
        return Objects.hash(value, fieldsBoosts, type, operator, analyzer, slop, fuzziness,
                prefixLength, maxExpansions, minimumShouldMatch, fuzzyRewrite, useDisMax, tieBreaker, lenient,
                cutoffFrequency, zeroTermsQuery);
    }

    @Override
    protected boolean doEquals(MultiMatchQueryBuilder other) {
        return Objects.equals(value, other.value) &&
                Objects.equals(fieldsBoosts, other.fieldsBoosts) &&
                Objects.equals(type, other.type) &&
                Objects.equals(operator, other.operator) &&
                Objects.equals(analyzer, other.analyzer) &&
                Objects.equals(slop, other.slop) &&
                Objects.equals(fuzziness, other.fuzziness) &&
                Objects.equals(prefixLength, other.prefixLength) &&
                Objects.equals(maxExpansions, other.maxExpansions) &&
                Objects.equals(minimumShouldMatch, other.minimumShouldMatch) &&
                Objects.equals(fuzzyRewrite, other.fuzzyRewrite) &&
                Objects.equals(useDisMax, other.useDisMax) &&
                Objects.equals(tieBreaker, other.tieBreaker) &&
                Objects.equals(lenient, other.lenient) &&
                Objects.equals(cutoffFrequency, other.cutoffFrequency) &&
                Objects.equals(zeroTermsQuery, other.zeroTermsQuery);
    }
}
@@ -19,14 +19,10 @@
package org.elasticsearch.index.query;

import org.apache.lucene.search.Query;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.support.QueryParsers;
import org.elasticsearch.index.search.MatchQuery;
import org.elasticsearch.index.search.MultiMatchQuery;

import java.io.IOException;
import java.util.HashMap;

@@ -35,7 +31,7 @@ import java.util.Map;
/**
 * Same as {@link MatchQueryParser} but has support for multiple fields.
 */
public class MultiMatchQueryParser extends BaseQueryParserTemp {
public class MultiMatchQueryParser extends BaseQueryParser<MultiMatchQueryBuilder> {

    @Override
    public String[] names() {

@@ -45,31 +41,41 @@ public class MultiMatchQueryParser extends BaseQueryParserTemp {
    }

    @Override
    public Query parse(QueryShardContext context) throws IOException, QueryParsingException {
        QueryParseContext parseContext = context.parseContext();
    public MultiMatchQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException, QueryParsingException {
        XContentParser parser = parseContext.parser();

        Object value = null;
        float boost = AbstractQueryBuilder.DEFAULT_BOOST;
        Float tieBreaker = null;
        MultiMatchQueryBuilder.Type type = null;
        MultiMatchQuery multiMatchQuery = new MultiMatchQuery(context);
        Map<String, Float> fieldsBoosts = new HashMap<>();
        MultiMatchQueryBuilder.Type type = MultiMatchQueryBuilder.DEFAULT_TYPE;
        String analyzer = null;
        int slop = MultiMatchQueryBuilder.DEFAULT_PHRASE_SLOP;
        Fuzziness fuzziness = null;
        int prefixLength = MultiMatchQueryBuilder.DEFAULT_PREFIX_LENGTH;
        int maxExpansions = MultiMatchQueryBuilder.DEFAULT_MAX_EXPANSIONS;
        Operator operator = MultiMatchQueryBuilder.DEFAULT_OPERATOR;
        String minimumShouldMatch = null;
        Map<String, Float> fieldNameWithBoosts = new HashMap<>();
        String fuzzyRewrite = null;
        Boolean useDisMax = null;
        Float tieBreaker = null;
        Float cutoffFrequency = null;
        boolean lenient = MultiMatchQueryBuilder.DEFAULT_LENIENCY;
        MatchQuery.ZeroTermsQuery zeroTermsQuery = MultiMatchQueryBuilder.DEFAULT_ZERO_TERMS_QUERY;

        float boost = AbstractQueryBuilder.DEFAULT_BOOST;
        String queryName = null;

        XContentParser.Token token;
        String currentFieldName = null;
        Boolean useDisMax = null;
        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
            if (token == XContentParser.Token.FIELD_NAME) {
                currentFieldName = parser.currentName();
            } else if ("fields".equals(currentFieldName)) {
                if (token == XContentParser.Token.START_ARRAY) {
                    while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                        extractFieldAndBoost(context, parser, fieldNameWithBoosts);
                        parseFieldAndBoost(parser, fieldsBoosts);
                    }
                } else if (token.isValue()) {
                    extractFieldAndBoost(context, parser, fieldNameWithBoosts);
                    parseFieldAndBoost(parser, fieldsBoosts);
                } else {
                    throw new QueryParsingException(parseContext, "[" + MultiMatchQueryBuilder.NAME + "] query does not support [" + currentFieldName + "]");
                }

@@ -79,41 +85,37 @@ public class MultiMatchQueryParser extends BaseQueryParserTemp {
            } else if ("type".equals(currentFieldName)) {
                type = MultiMatchQueryBuilder.Type.parse(parser.text(), parseContext.parseFieldMatcher());
            } else if ("analyzer".equals(currentFieldName)) {
                String analyzer = parser.text();
                if (context.analysisService().analyzer(analyzer) == null) {
                    throw new QueryParsingException(parseContext, "[" + MultiMatchQueryBuilder.NAME + "] analyzer [" + parser.text() + "] not found");
                }
                multiMatchQuery.setAnalyzer(analyzer);
                analyzer = parser.text();
            } else if ("boost".equals(currentFieldName)) {
                boost = parser.floatValue();
            } else if ("slop".equals(currentFieldName) || "phrase_slop".equals(currentFieldName) || "phraseSlop".equals(currentFieldName)) {
                multiMatchQuery.setPhraseSlop(parser.intValue());
                slop = parser.intValue();
            } else if (parseContext.parseFieldMatcher().match(currentFieldName, Fuzziness.FIELD)) {
                multiMatchQuery.setFuzziness(Fuzziness.parse(parser));
                fuzziness = Fuzziness.parse(parser);
            } else if ("prefix_length".equals(currentFieldName) || "prefixLength".equals(currentFieldName)) {
                multiMatchQuery.setFuzzyPrefixLength(parser.intValue());
                prefixLength = parser.intValue();
            } else if ("max_expansions".equals(currentFieldName) || "maxExpansions".equals(currentFieldName)) {
                multiMatchQuery.setMaxExpansions(parser.intValue());
                maxExpansions = parser.intValue();
            } else if ("operator".equals(currentFieldName)) {
                multiMatchQuery.setOccur(Operator.fromString(parser.text()).toBooleanClauseOccur());
                operator = Operator.fromString(parser.text());
            } else if ("minimum_should_match".equals(currentFieldName) || "minimumShouldMatch".equals(currentFieldName)) {
                minimumShouldMatch = parser.textOrNull();
            } else if ("fuzzy_rewrite".equals(currentFieldName) || "fuzzyRewrite".equals(currentFieldName)) {
                multiMatchQuery.setFuzzyRewriteMethod(QueryParsers.parseRewriteMethod(parseContext.parseFieldMatcher(), parser.textOrNull(), null));
                fuzzyRewrite = parser.textOrNull();
            } else if ("use_dis_max".equals(currentFieldName) || "useDisMax".equals(currentFieldName)) {
                useDisMax = parser.booleanValue();
            } else if ("tie_breaker".equals(currentFieldName) || "tieBreaker".equals(currentFieldName)) {
                multiMatchQuery.setTieBreaker(tieBreaker = parser.floatValue());
                tieBreaker = parser.floatValue();
            } else if ("cutoff_frequency".equals(currentFieldName)) {
                multiMatchQuery.setCommonTermsCutoff(parser.floatValue());
                cutoffFrequency = parser.floatValue();
            } else if ("lenient".equals(currentFieldName)) {
                multiMatchQuery.setLenient(parser.booleanValue());
                lenient = parser.booleanValue();
            } else if ("zero_terms_query".equals(currentFieldName)) {
                String zeroTermsDocs = parser.text();
                if ("none".equalsIgnoreCase(zeroTermsDocs)) {
                    multiMatchQuery.setZeroTermsQuery(MatchQuery.ZeroTermsQuery.NONE);
                    zeroTermsQuery = MatchQuery.ZeroTermsQuery.NONE;
                } else if ("all".equalsIgnoreCase(zeroTermsDocs)) {
                    multiMatchQuery.setZeroTermsQuery(MatchQuery.ZeroTermsQuery.ALL);
                    zeroTermsQuery = MatchQuery.ZeroTermsQuery.ALL;
                } else {
                    throw new QueryParsingException(parseContext, "Unsupported zero_terms_docs value [" + zeroTermsDocs + "]");
                }

@@ -129,37 +131,33 @@ public class MultiMatchQueryParser extends BaseQueryParserTemp {
            throw new QueryParsingException(parseContext, "No text specified for multi_match query");
        }

        if (fieldNameWithBoosts.isEmpty()) {
        if (fieldsBoosts.isEmpty()) {
            throw new QueryParsingException(parseContext, "No fields specified for multi_match query");
        }
        if (type == null) {
            type = MultiMatchQueryBuilder.Type.BEST_FIELDS;
        }
        if (useDisMax != null) { // backwards foobar
            boolean typeUsesDismax = type.tieBreaker() != 1.0f;
            if (typeUsesDismax != useDisMax) {
                if (useDisMax && tieBreaker == null) {
                    multiMatchQuery.setTieBreaker(0.0f);
                } else {
                    multiMatchQuery.setTieBreaker(1.0f);
                }
            }
        }
        Query query = multiMatchQuery.parse(type, fieldNameWithBoosts, value, minimumShouldMatch);
        if (query == null) {
            return null;
        }

        query.setBoost(boost);
        if (queryName != null) {
            context.addNamedQuery(queryName, query);
        }
        return query;
        return new MultiMatchQueryBuilder(value)
                .fields(fieldsBoosts)
                .type(type)
                .analyzer(analyzer)
                .cutoffFrequency(cutoffFrequency)
                .fuzziness(fuzziness)
                .fuzzyRewrite(fuzzyRewrite)
                .useDisMax(useDisMax)
                .lenient(lenient)
                .maxExpansions(maxExpansions)
                .minimumShouldMatch(minimumShouldMatch)
                .operator(operator)
                .prefixLength(prefixLength)
                .slop(slop)
                .tieBreaker(tieBreaker)
                .zeroTermsQuery(zeroTermsQuery)
                .boost(boost)
                .queryName(queryName);
    }

    private void extractFieldAndBoost(QueryShardContext context, XContentParser parser, Map<String, Float> fieldNameWithBoosts) throws IOException {
    private void parseFieldAndBoost(XContentParser parser, Map<String, Float> fieldsBoosts) throws IOException {
        String fField = null;
        Float fBoost = null;
        Float fBoost = AbstractQueryBuilder.DEFAULT_BOOST;
        char[] fieldText = parser.textCharacters();
        int end = parser.textOffset() + parser.textLength();
        for (int i = parser.textOffset(); i < end; i++) {

@@ -173,14 +171,7 @@ public class MultiMatchQueryParser extends BaseQueryParserTemp {
        if (fField == null) {
            fField = parser.text();
        }

        if (Regex.isSimpleMatchPattern(fField)) {
            for (String field : context.mapperService().simpleMatchToIndexNames(fField)) {
                fieldNameWithBoosts.put(field, fBoost);
            }
        } else {
            fieldNameWithBoosts.put(fField, fBoost);
        }
        fieldsBoosts.put(fField, fBoost);
    }

    @Override
@@ -20,10 +20,8 @@
package org.elasticsearch.index.query;

import com.carrotsearch.randomizedtesting.generators.CodepointSetGenerator;

import org.apache.lucene.search.Query;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionFuture;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.get.GetResponse;

@@ -79,11 +77,7 @@ import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.threadpool.ThreadPoolModule;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.*;

import java.io.IOException;
import java.lang.reflect.InvocationHandler;

@@ -95,10 +89,7 @@ import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutionException;

import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
import static org.hamcrest.Matchers.*;

public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>> extends ESTestCase {

@@ -520,6 +511,15 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>>
        return value;
    }

    protected static String getRandomQueryText() {
        int terms = randomIntBetween(0, 3);
        StringBuilder builder = new StringBuilder();
        for (int i = 0; i < terms; i++) {
            builder.append(randomAsciiOfLengthBetween(1, 10) + " ");
        }
        return builder.toString().trim();
    }

    /**
     * Helper method to return a mapped or a random field
     */
@@ -0,0 +1,190 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.query;

import org.apache.lucene.index.Term;
import org.apache.lucene.queries.ExtendedCommonTermsQuery;
import org.apache.lucene.search.*;
import org.elasticsearch.common.lucene.all.AllTermQuery;
import org.elasticsearch.common.lucene.search.MultiPhrasePrefixQuery;
import org.elasticsearch.index.search.MatchQuery;
import org.junit.Test;

import java.io.IOException;
import java.util.List;

import static org.elasticsearch.index.query.QueryBuilders.multiMatchQuery;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertBooleanSubQuery;
import static org.hamcrest.CoreMatchers.*;
import static org.hamcrest.Matchers.is;

public class MultiMatchQueryBuilderTests extends AbstractQueryTestCase<MultiMatchQueryBuilder> {

    @Override
    protected MultiMatchQueryBuilder doCreateTestQueryBuilder() {
        String fieldName = randomFrom(STRING_FIELD_NAME, INT_FIELD_NAME, DOUBLE_FIELD_NAME, BOOLEAN_FIELD_NAME, DATE_FIELD_NAME);
        if (fieldName.equals(DATE_FIELD_NAME)) {
            assumeTrue("test with date fields runs only when at least a type is registered", getCurrentTypes().length > 0);
        }
        // creates the query with random value and field name
        Object value;
        if (fieldName.equals(STRING_FIELD_NAME)) {
            value = getRandomQueryText();
        } else {
            value = getRandomValueForFieldName(fieldName);
        }
        MultiMatchQueryBuilder query = new MultiMatchQueryBuilder(value, fieldName);
        // field with random boost
        if (randomBoolean()) {
            query.field(fieldName, randomFloat() * 10);
        }
        // sets other parameters of the multi match query
        if (randomBoolean()) {
            query.type(randomFrom(MultiMatchQueryBuilder.Type.values()));
        }
        if (randomBoolean()) {
            query.operator(randomFrom(Operator.values()));
        }
        if (randomBoolean()) {
            query.analyzer(randomAnalyzer());
        }
        if (randomBoolean()) {
            query.slop(randomIntBetween(0, 5));
        }
        if (randomBoolean()) {
            query.fuzziness(randomFuzziness(fieldName));
        }
        if (randomBoolean()) {
            query.prefixLength(randomIntBetween(0, 5));
        }
        if (randomBoolean()) {
            query.maxExpansions(randomIntBetween(1, 5));
        }
        if (randomBoolean()) {
            query.minimumShouldMatch(randomMinimumShouldMatch());
        }
        if (randomBoolean()) {
            query.fuzzyRewrite(getRandomRewriteMethod());
        }
        if (randomBoolean()) {
            query.useDisMax(randomBoolean());
        }
        if (randomBoolean()) {
            query.tieBreaker(randomFloat());
        }
        if (randomBoolean()) {
            query.lenient(randomBoolean());
        }
        if (randomBoolean()) {
            query.cutoffFrequency((float) 10 / randomIntBetween(1, 100));
        }
        if (randomBoolean()) {
            query.zeroTermsQuery(randomFrom(MatchQuery.ZeroTermsQuery.values()));
        }
        // test with fields with boost and patterns delegated to the tests further below
        return query;
    }

    @Override
    protected void doAssertLuceneQuery(MultiMatchQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
        // we rely on integration tests for deeper checks here
        assertThat(query, either(instanceOf(TermQuery.class)).or(instanceOf(AllTermQuery.class))
                .or(instanceOf(BooleanQuery.class)).or(instanceOf(DisjunctionMaxQuery.class))
                .or(instanceOf(FuzzyQuery.class)).or(instanceOf(MultiPhrasePrefixQuery.class))
                .or(instanceOf(MatchAllDocsQuery.class)).or(instanceOf(ExtendedCommonTermsQuery.class))
                .or(instanceOf(MatchNoDocsQuery.class)).or(instanceOf(PhraseQuery.class)));
    }

    @Test
    public void testValidate() {
        MultiMatchQueryBuilder multiMatchQueryBuilder = new MultiMatchQueryBuilder("text");
        assertThat(multiMatchQueryBuilder.validate().validationErrors().size(), is(1));

        multiMatchQueryBuilder = new MultiMatchQueryBuilder("text", "field");
        assertNull(multiMatchQueryBuilder.validate());
    }

    @Override
    protected void assertBoost(MultiMatchQueryBuilder queryBuilder, Query query) throws IOException {
        //we delegate boost checks to specific boost tests below
    }

    @Test
    public void testToQueryBoost() throws IOException {
        assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
        QueryShardContext shardContext = createShardContext();
        MultiMatchQueryBuilder multiMatchQueryBuilder = new MultiMatchQueryBuilder("test");
        multiMatchQueryBuilder.field(STRING_FIELD_NAME, 5);
        Query query = multiMatchQueryBuilder.toQuery(shardContext);
        assertThat(query, instanceOf(TermQuery.class));
        assertThat(query.getBoost(), equalTo(5f));

        multiMatchQueryBuilder = new MultiMatchQueryBuilder("test");
        multiMatchQueryBuilder.field(STRING_FIELD_NAME, 5);
        multiMatchQueryBuilder.boost(2);
        query = multiMatchQueryBuilder.toQuery(shardContext);
        assertThat(query, instanceOf(TermQuery.class));
        assertThat(query.getBoost(), equalTo(10f));
    }

    @Test
    public void testToQueryMultipleTermsBooleanQuery() throws Exception {
        assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
        Query query = multiMatchQuery("test1 test2").field(STRING_FIELD_NAME).useDisMax(false).toQuery(createShardContext());
        assertThat(query, instanceOf(BooleanQuery.class));
        BooleanQuery bQuery = (BooleanQuery) query;
        assertThat(bQuery.clauses().size(), equalTo(2));
        assertThat(assertBooleanSubQuery(query, TermQuery.class, 0).getTerm(), equalTo(new Term(STRING_FIELD_NAME, "test1")));
        assertThat(assertBooleanSubQuery(query, TermQuery.class, 1).getTerm(), equalTo(new Term(STRING_FIELD_NAME, "test2")));
    }

    @Test
    public void testToQueryMultipleFieldsBooleanQuery() throws Exception {
        assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
        Query query = multiMatchQuery("test").field(STRING_FIELD_NAME).field(STRING_FIELD_NAME_2).useDisMax(false).toQuery(createShardContext());
        assertThat(query, instanceOf(BooleanQuery.class));
        BooleanQuery bQuery = (BooleanQuery) query;
        assertThat(bQuery.clauses().size(), equalTo(2));
        assertThat(assertBooleanSubQuery(query, TermQuery.class, 0).getTerm(), equalTo(new Term(STRING_FIELD_NAME, "test")));
        assertThat(assertBooleanSubQuery(query, TermQuery.class, 1).getTerm(), equalTo(new Term(STRING_FIELD_NAME_2, "test")));
    }

    @Test
    public void testToQueryMultipleFieldsDisMaxQuery() throws Exception {
        assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
        Query query = multiMatchQuery("test").field(STRING_FIELD_NAME).field(STRING_FIELD_NAME_2).useDisMax(true).toQuery(createShardContext());
        assertThat(query, instanceOf(DisjunctionMaxQuery.class));
        DisjunctionMaxQuery disMaxQuery = (DisjunctionMaxQuery) query;
        List<Query> disjuncts = disMaxQuery.getDisjuncts();
        assertThat(((TermQuery) disjuncts.get(0)).getTerm(), equalTo(new Term(STRING_FIELD_NAME, "test")));
        assertThat(((TermQuery) disjuncts.get(1)).getTerm(), equalTo(new Term(STRING_FIELD_NAME_2, "test")));
    }

    @Test
    public void testToQueryFieldsWildcard() throws Exception {
        assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
        Query query = multiMatchQuery("test").field("mapped_str*").useDisMax(false).toQuery(createShardContext());
        assertThat(query, instanceOf(BooleanQuery.class));
        BooleanQuery bQuery = (BooleanQuery) query;
        assertThat(bQuery.clauses().size(), equalTo(2));
        assertThat(assertBooleanSubQuery(query, TermQuery.class, 0).getTerm(), equalTo(new Term(STRING_FIELD_NAME, "test")));
        assertThat(assertBooleanSubQuery(query, TermQuery.class, 1).getTerm(), equalTo(new Term(STRING_FIELD_NAME_2, "test")));
    }
}
@@ -1957,7 +1957,7 @@ public class SimpleIndexQueryParserTests extends ESSingleNodeTestCase {

    public void testCrossFieldMultiMatchQuery() throws IOException {
        IndexQueryParserService queryParser = queryParser();
        Query parsedQuery = queryParser.parse(multiMatchQuery("banon", "name.first^2", "name.last^3", "foobar").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS)).query();
        Query parsedQuery = queryParser.parse(multiMatchQuery("banon").field("name.first", 2).field("name.last", 3).field("foobar").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS)).query();
        try (Engine.Searcher searcher = indexService.shardSafe(0).acquireSearcher("test")) {
            Query rewrittenQuery = searcher.searcher().rewrite(parsedQuery);
@@ -171,7 +171,7 @@ public class MultiMatchQueryIT extends ESIntegTestCase {

    @Test
    public void testDefaults() throws ExecutionException, InterruptedException {
        MatchQuery.Type type = randomBoolean() ? null : MatchQuery.Type.BOOLEAN;
        MatchQuery.Type type = randomBoolean() ? MatchQueryBuilder.DEFAULT_TYPE : MatchQuery.Type.BOOLEAN;
        SearchResponse searchResponse = client().prepareSearch("test")
                .setQuery(randomizeType(multiMatchQuery("marvel hero captain america", "full_name", "first_name", "last_name", "category")
                        .operator(Operator.OR))).get();

@@ -279,7 +279,7 @@ public class MultiMatchQueryIT extends ESIntegTestCase {
    public void testCutoffFreq() throws ExecutionException, InterruptedException {
        final long numDocs = client().prepareCount("test")
                .setQuery(matchAllQuery()).get().getCount();
        MatchQuery.Type type = randomBoolean() ? null : MatchQuery.Type.BOOLEAN;
        MatchQuery.Type type = randomBoolean() ? MatchQueryBuilder.DEFAULT_TYPE : MatchQuery.Type.BOOLEAN;
        Float cutoffFrequency = randomBoolean() ? Math.min(1, numDocs * 1.f / between(10, 20)) : 1.f / between(10, 20);
        SearchResponse searchResponse = client().prepareSearch("test")
                .setQuery(randomizeType(multiMatchQuery("marvel hero captain america", "full_name", "first_name", "last_name", "category")

@@ -344,7 +344,7 @@ public class MultiMatchQueryIT extends ESIntegTestCase {
        int numIters = scaledRandomIntBetween(5, 10);
        for (int i = 0; i < numIters; i++) {
            {
                MatchQuery.Type type = randomBoolean() ? null : MatchQuery.Type.BOOLEAN;
                MatchQuery.Type type = randomBoolean() ? MatchQueryBuilder.DEFAULT_TYPE : MatchQuery.Type.BOOLEAN;
                MultiMatchQueryBuilder multiMatchQueryBuilder = randomBoolean() ? multiMatchQuery("marvel hero captain america", "full_name", "first_name", "last_name", "category") :
                        multiMatchQuery("marvel hero captain america", "*_name", randomBoolean() ? "category" : "categ*");
                SearchResponse left = client().prepareSearch("test").setSize(numDocs)

@@ -364,7 +364,7 @@ public class MultiMatchQueryIT extends ESIntegTestCase {
            }

            {
                MatchQuery.Type type = randomBoolean() ? null : MatchQuery.Type.BOOLEAN;
                MatchQuery.Type type = randomBoolean() ? MatchQueryBuilder.DEFAULT_TYPE : MatchQuery.Type.BOOLEAN;
                String minShouldMatch = randomBoolean() ? null : "" + between(0, 1);
                Operator op = randomBoolean() ? Operator.AND : Operator.OR;
                MultiMatchQueryBuilder multiMatchQueryBuilder = randomBoolean() ? multiMatchQuery("captain america", "full_name", "first_name", "last_name", "category") :

@@ -509,20 +509,20 @@ public class MultiMatchQueryIT extends ESIntegTestCase {
        // counter example
        searchResponse = client().prepareSearch("test")
                .setQuery(randomizeType(multiMatchQuery("captain america marvel hero", "first_name", "last_name", "category")
                        .type(randomBoolean() ? MultiMatchQueryBuilder.Type.CROSS_FIELDS : null)
                        .type(randomBoolean() ? MultiMatchQueryBuilder.Type.CROSS_FIELDS : MultiMatchQueryBuilder.DEFAULT_TYPE)
                        .operator(Operator.AND))).get();
        assertHitCount(searchResponse, 0l);

        // counter example
        searchResponse = client().prepareSearch("test")
                .setQuery(randomizeType(multiMatchQuery("captain america marvel hero", "first_name", "last_name", "category")
                        .type(randomBoolean() ? MultiMatchQueryBuilder.Type.CROSS_FIELDS : null)
                        .type(randomBoolean() ? MultiMatchQueryBuilder.Type.CROSS_FIELDS : MultiMatchQueryBuilder.DEFAULT_TYPE)
                        .operator(Operator.AND))).get();
        assertHitCount(searchResponse, 0l);

        // test if boosts work
        searchResponse = client().prepareSearch("test")
                .setQuery(randomizeType(multiMatchQuery("the ultimate", "full_name", "first_name", "last_name^2", "category")
                .setQuery(randomizeType(multiMatchQuery("the ultimate", "full_name", "first_name", "last_name", "category").field("last_name", 2)
                        .type(MultiMatchQueryBuilder.Type.CROSS_FIELDS)
                        .operator(Operator.AND))).get();
        assertFirstHit(searchResponse, hasId("ultimate1")); // has ultimate in the last_name and that is boosted

@@ -560,7 +560,6 @@ public class MultiMatchQueryIT extends ESIntegTestCase {
        }
    }


    public static List<String> fill(List<String> list, String value, int times) {
        for (int i = 0; i < times; i++) {
            list.add(value);
@@ -908,7 +908,7 @@ public class SearchQueryIT extends ESIntegTestCase {
        assertFirstHit(searchResponse, hasId("1"));

        refresh();
        builder = multiMatchQuery("value1", "field1", "field3^1.5")
        builder = multiMatchQuery("value1", "field1").field("field3", 1.5f)
                .operator(Operator.AND); // Operator only applies on terms inside a field! Fields are always OR-ed together.
        searchResponse = client().prepareSearch().setQuery(builder).get();
        assertHitCount(searchResponse, 2l);

@@ -1826,15 +1826,15 @@ public class SearchQueryIT extends ESIntegTestCase {
        refresh();

        SearchResponse searchResponse = client().prepareSearch("test")
                .setQuery(multiMatchQuery("value2", "field1^2", "field2").lenient(true).useDisMax(false)).get();
                .setQuery(multiMatchQuery("value2", "field2").field("field1", 2).lenient(true).useDisMax(false)).get();
        assertHitCount(searchResponse, 1l);

        searchResponse = client().prepareSearch("test")
                .setQuery(multiMatchQuery("value2", "field1^2", "field2").lenient(true).useDisMax(true)).get();
                .setQuery(multiMatchQuery("value2", "field2").field("field1", 2).lenient(true).useDisMax(true)).get();
        assertHitCount(searchResponse, 1l);

        searchResponse = client().prepareSearch("test")
                .setQuery(multiMatchQuery("value2", "field2^2").lenient(true)).get();
                .setQuery(multiMatchQuery("value2").field("field2", 2).lenient(true)).get();
        assertHitCount(searchResponse, 1l);
    }
@@ -104,3 +104,11 @@ The deprecated `docs(Item... docs)`, `ignoreLike(Item... docs)`,

Removing individual setters for lon() and lat() values, both values should be set together
using point(lon, lat).

==== MultiMatchQueryBuilder

Moving the MultiMatchQueryBuilder.ZeroTermsQuery enum to MatchQuery.ZeroTermsQuery.
Also reusing the new Operator enum.

Removed the ability to pass in a boost value using the `field(String field)` method in the form e.g. `field^2`.
Use the `field(String, float)` method instead.
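A minimal before/after sketch of the boost change described above (field names and boost values are illustrative only):

    // Before: boost encoded in the field name (no longer parsed by field(String)):
    //   multiMatchQuery("some text", "title^2", "body");
    // After: pass the boost explicitly, and use the relocated enums:
    multiMatchQuery("some text", "body")
            .field("title", 2.0f)
            .operator(Operator.AND)
            .zeroTermsQuery(MatchQuery.ZeroTermsQuery.NONE);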