diff --git a/Vagrantfile b/Vagrantfile index dcc33bc8f5f..ab0e3224015 100644 --- a/Vagrantfile +++ b/Vagrantfile @@ -83,6 +83,10 @@ Vagrant.configure(2) do |config| # the elasticsearch project called vagrant.... config.vm.synced_folder ".", "/vagrant", disabled: true config.vm.synced_folder ".", "/elasticsearch" + config.vm.provider "virtualbox" do |v| + # Give the boxes 2GB so they can run our tests if they have to. + v.memory = 2048 + end if Vagrant.has_plugin?("vagrant-cachier") config.cache.scope = :box end diff --git a/core/src/main/java/org/apache/lucene/queries/ExtendedCommonTermsQuery.java b/core/src/main/java/org/apache/lucene/queries/ExtendedCommonTermsQuery.java index e09c555fd37..1889c6e759b 100644 --- a/core/src/main/java/org/apache/lucene/queries/ExtendedCommonTermsQuery.java +++ b/core/src/main/java/org/apache/lucene/queries/ExtendedCommonTermsQuery.java @@ -76,6 +76,10 @@ public class ExtendedCommonTermsQuery extends CommonTermsQuery { return lowFreqMinNumShouldMatchSpec; } + public float getMaxTermFrequency() { + return this.maxTermFrequency; + } + @Override protected Query newTermQuery(Term term, TermContext context) { if (fieldType == null) { diff --git a/core/src/main/java/org/apache/lucene/queryparser/classic/ExistsFieldQueryExtension.java b/core/src/main/java/org/apache/lucene/queryparser/classic/ExistsFieldQueryExtension.java index 6cac629796f..cb4bee30aaa 100644 --- a/core/src/main/java/org/apache/lucene/queryparser/classic/ExistsFieldQueryExtension.java +++ b/core/src/main/java/org/apache/lucene/queryparser/classic/ExistsFieldQueryExtension.java @@ -21,8 +21,8 @@ package org.apache.lucene.queryparser.classic; import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.Query; -import org.elasticsearch.index.query.ExistsQueryParser; -import org.elasticsearch.index.query.QueryParseContext; +import org.elasticsearch.index.query.ExistsQueryBuilder; +import org.elasticsearch.index.query.QueryShardContext; /** * @@ -32,7 
+32,7 @@ public class ExistsFieldQueryExtension implements FieldQueryExtension { public static final String NAME = "_exists_"; @Override - public Query query(QueryParseContext parseContext, String queryText) { - return new ConstantScoreQuery(ExistsQueryParser.newFilter(parseContext, queryText, null)); + public Query query(QueryShardContext context, String queryText) { + return new ConstantScoreQuery(ExistsQueryBuilder.newFilter(context, queryText)); } } diff --git a/core/src/main/java/org/apache/lucene/queryparser/classic/FieldQueryExtension.java b/core/src/main/java/org/apache/lucene/queryparser/classic/FieldQueryExtension.java index 003ff180ba3..299a37a1550 100644 --- a/core/src/main/java/org/apache/lucene/queryparser/classic/FieldQueryExtension.java +++ b/core/src/main/java/org/apache/lucene/queryparser/classic/FieldQueryExtension.java @@ -20,12 +20,12 @@ package org.apache.lucene.queryparser.classic; import org.apache.lucene.search.Query; -import org.elasticsearch.index.query.QueryParseContext; +import org.elasticsearch.index.query.QueryShardContext; /** * */ public interface FieldQueryExtension { - Query query(QueryParseContext parseContext, String queryText); + Query query(QueryShardContext context, String queryText); } diff --git a/core/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java b/core/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java index fe0f640542e..3a61daeca12 100644 --- a/core/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java +++ b/core/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java @@ -19,31 +19,21 @@ package org.apache.lucene.queryparser.classic; +import com.google.common.collect.ImmutableMap; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; import org.apache.lucene.index.Term; -import org.apache.lucene.search.BooleanClause; -import 
org.apache.lucene.search.DisjunctionMaxQuery; -import org.apache.lucene.search.FilteredQuery; -import org.apache.lucene.search.FuzzyQuery; -import org.apache.lucene.search.MatchNoDocsQuery; -import org.apache.lucene.search.MultiPhraseQuery; -import org.apache.lucene.search.PhraseQuery; -import org.apache.lucene.search.Query; -import org.apache.lucene.search.WildcardQuery; -import org.apache.lucene.util.Version; +import org.apache.lucene.search.*; import org.apache.lucene.util.automaton.RegExp; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.core.DateFieldMapper; -import org.elasticsearch.index.query.QueryParseContext; +import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.support.QueryParsers; -import com.google.common.collect.ImmutableMap; - import java.io.IOException; import java.util.ArrayList; import java.util.Collection; @@ -70,53 +60,27 @@ public class MapperQueryParser extends QueryParser { .build(); } - private final QueryParseContext parseContext; + private final QueryShardContext context; private QueryParserSettings settings; - private Analyzer quoteAnalyzer; - - private boolean forcedAnalyzer; - private boolean forcedQuoteAnalyzer; - private MappedFieldType currentFieldType; - private boolean analyzeWildcard; - - private String quoteFieldSuffix; - - public MapperQueryParser(QueryParseContext parseContext) { + public MapperQueryParser(QueryShardContext context) { super(null, null); - this.parseContext = parseContext; + this.context = context; } public void reset(QueryParserSettings settings) { this.settings = settings; - this.field = settings.defaultField(); - - if (settings.fields() != null) { - if (settings.fields.size() == 1) { - // just mark it as the default field - this.field = settings.fields().get(0); - } else { 
- // otherwise, we need to have the default field being null... - this.field = null; - } - } - - this.forcedAnalyzer = settings.forcedAnalyzer() != null; - this.setAnalyzer(forcedAnalyzer ? settings.forcedAnalyzer() : settings.defaultAnalyzer()); - if (settings.forcedQuoteAnalyzer() != null) { - this.forcedQuoteAnalyzer = true; - this.quoteAnalyzer = settings.forcedQuoteAnalyzer(); - } else if (forcedAnalyzer) { - this.forcedQuoteAnalyzer = true; - this.quoteAnalyzer = settings.forcedAnalyzer(); + if (settings.fieldsAndWeights().isEmpty()) { + this.field = settings.defaultField(); + } else if (settings.fieldsAndWeights().size() == 1) { + this.field = settings.fieldsAndWeights().keySet().iterator().next(); } else { - this.forcedAnalyzer = false; - this.quoteAnalyzer = settings.defaultQuoteAnalyzer(); + this.field = null; } - this.quoteFieldSuffix = settings.quoteFieldSuffix(); + setAnalyzer(settings.analyzer()); setMultiTermRewriteMethod(settings.rewriteMethod()); setEnablePositionIncrements(settings.enablePositionIncrements()); setAutoGeneratePhraseQueries(settings.autoGeneratePhraseQueries()); @@ -125,10 +89,9 @@ public class MapperQueryParser extends QueryParser { setLowercaseExpandedTerms(settings.lowercaseExpandedTerms()); setPhraseSlop(settings.phraseSlop()); setDefaultOperator(settings.defaultOperator()); - setFuzzyMinSim(settings.getFuzziness().asFloat()); + setFuzzyMinSim(settings.fuzziness().asFloat()); setFuzzyPrefixLength(settings.fuzzyPrefixLength()); setLocale(settings.locale()); - this.analyzeWildcard = settings.analyzeWildcard(); } /** @@ -162,7 +125,7 @@ public class MapperQueryParser extends QueryParser { public Query getFieldQuery(String field, String queryText, boolean quoted) throws ParseException { FieldQueryExtension fieldQueryExtension = fieldQueryExtensions.get(field); if (fieldQueryExtension != null) { - return fieldQueryExtension.query(parseContext, queryText); + return fieldQueryExtension.query(context, queryText); } Collection fields = 
extractMultiFields(field); if (fields != null) { @@ -224,29 +187,29 @@ public class MapperQueryParser extends QueryParser { Analyzer oldAnalyzer = getAnalyzer(); try { if (quoted) { - setAnalyzer(quoteAnalyzer); - if (quoteFieldSuffix != null) { - currentFieldType = parseContext.fieldMapper(field + quoteFieldSuffix); + setAnalyzer(settings.quoteAnalyzer()); + if (settings.quoteFieldSuffix() != null) { + currentFieldType = context.fieldMapper(field + settings.quoteFieldSuffix()); } } if (currentFieldType == null) { - currentFieldType = parseContext.fieldMapper(field); + currentFieldType = context.fieldMapper(field); } if (currentFieldType != null) { if (quoted) { - if (!forcedQuoteAnalyzer) { - setAnalyzer(parseContext.getSearchQuoteAnalyzer(currentFieldType)); + if (!settings.forceQuoteAnalyzer()) { + setAnalyzer(context.getSearchQuoteAnalyzer(currentFieldType)); } } else { - if (!forcedAnalyzer) { - setAnalyzer(parseContext.getSearchAnalyzer(currentFieldType)); + if (!settings.forceAnalyzer()) { + setAnalyzer(context.getSearchAnalyzer(currentFieldType)); } } if (currentFieldType != null) { Query query = null; if (currentFieldType.useTermQueryWithQueryString()) { try { - query = currentFieldType.termQuery(queryText, parseContext); + query = currentFieldType.termQuery(queryText, context); } catch (RuntimeException e) { if (settings.lenient()) { return null; @@ -357,7 +320,7 @@ public class MapperQueryParser extends QueryParser { } private Query getRangeQuerySingle(String field, String part1, String part2, boolean startInclusive, boolean endInclusive) { - currentFieldType = parseContext.fieldMapper(field); + currentFieldType = context.fieldMapper(field); if (currentFieldType != null) { if (lowercaseExpandedTerms && !currentFieldType.isNumeric()) { part1 = part1 == null ? 
null : part1.toLowerCase(locale); @@ -422,7 +385,7 @@ public class MapperQueryParser extends QueryParser { } private Query getFuzzyQuerySingle(String field, String termStr, String minSimilarity) throws ParseException { - currentFieldType = parseContext.fieldMapper(field); + currentFieldType = context.fieldMapper(field); if (currentFieldType != null) { try { return currentFieldType.fuzzyQuery(termStr, Fuzziness.build(minSimilarity), fuzzyPrefixLength, settings.fuzzyMaxExpansions(), FuzzyQuery.defaultTranspositions); @@ -492,14 +455,14 @@ public class MapperQueryParser extends QueryParser { currentFieldType = null; Analyzer oldAnalyzer = getAnalyzer(); try { - currentFieldType = parseContext.fieldMapper(field); + currentFieldType = context.fieldMapper(field); if (currentFieldType != null) { - if (!forcedAnalyzer) { - setAnalyzer(parseContext.getSearchAnalyzer(currentFieldType)); + if (!settings.forceAnalyzer()) { + setAnalyzer(context.getSearchAnalyzer(currentFieldType)); } Query query = null; if (currentFieldType.useTermQueryWithQueryString()) { - query = currentFieldType.prefixQuery(termStr, multiTermRewriteMethod, parseContext); + query = currentFieldType.prefixQuery(termStr, multiTermRewriteMethod, context); } if (query == null) { query = getPossiblyAnalyzedPrefixQuery(currentFieldType.names().indexName(), termStr); @@ -518,7 +481,7 @@ public class MapperQueryParser extends QueryParser { } private Query getPossiblyAnalyzedPrefixQuery(String field, String termStr) throws ParseException { - if (!analyzeWildcard) { + if (!settings.analyzeWildcard()) { return super.getPrefixQuery(field, termStr); } // get Analyzer from superclass and tokenize the term @@ -556,16 +519,7 @@ public class MapperQueryParser extends QueryParser { clauses.add(new BooleanClause(super.getPrefixQuery(field, token), BooleanClause.Occur.SHOULD)); } return getBooleanQuery(clauses, true); - - //return super.getPrefixQuery(field, termStr); - - /* this means that the analyzer used either added or 
consumed -* (common for a stemmer) tokens, and we can't build a PrefixQuery */ -// throw new ParseException("Cannot build PrefixQuery with analyzer " -// + getAnalyzer().getClass() -// + (tlist.size() > 1 ? " - token(s) added" : " - token consumed")); } - } @Override @@ -584,7 +538,7 @@ public class MapperQueryParser extends QueryParser { return newMatchAllDocsQuery(); } // effectively, we check if a field exists or not - return fieldQueryExtensions.get(ExistsFieldQueryExtension.NAME).query(parseContext, actualField); + return fieldQueryExtensions.get(ExistsFieldQueryExtension.NAME).query(context, actualField); } } if (lowercaseExpandedTerms) { @@ -633,10 +587,10 @@ public class MapperQueryParser extends QueryParser { currentFieldType = null; Analyzer oldAnalyzer = getAnalyzer(); try { - currentFieldType = parseContext.fieldMapper(field); + currentFieldType = context.fieldMapper(field); if (currentFieldType != null) { - if (!forcedAnalyzer) { - setAnalyzer(parseContext.getSearchAnalyzer(currentFieldType)); + if (!settings.forceAnalyzer()) { + setAnalyzer(context.getSearchAnalyzer(currentFieldType)); } indexedNameField = currentFieldType.names().indexName(); return getPossiblyAnalyzedWildcardQuery(indexedNameField, termStr); @@ -653,7 +607,7 @@ public class MapperQueryParser extends QueryParser { } private Query getPossiblyAnalyzedWildcardQuery(String field, String termStr) throws ParseException { - if (!analyzeWildcard) { + if (!settings.analyzeWildcard()) { return super.getWildcardQuery(field, termStr); } boolean isWithinToken = (!termStr.startsWith("?") && !termStr.startsWith("*")); @@ -765,14 +719,14 @@ public class MapperQueryParser extends QueryParser { currentFieldType = null; Analyzer oldAnalyzer = getAnalyzer(); try { - currentFieldType = parseContext.fieldMapper(field); + currentFieldType = context.fieldMapper(field); if (currentFieldType != null) { - if (!forcedAnalyzer) { - setAnalyzer(parseContext.getSearchAnalyzer(currentFieldType)); + if 
(!settings.forceAnalyzer()) { + setAnalyzer(context.getSearchAnalyzer(currentFieldType)); } Query query = null; if (currentFieldType.useTermQueryWithQueryString()) { - query = currentFieldType.regexpQuery(termStr, RegExp.ALL, maxDeterminizedStates, multiTermRewriteMethod, parseContext); + query = currentFieldType.regexpQuery(termStr, RegExp.ALL, maxDeterminizedStates, multiTermRewriteMethod, context); } if (query == null) { query = super.getRegexpQuery(field, termStr); @@ -800,9 +754,9 @@ public class MapperQueryParser extends QueryParser { } private void applyBoost(String field, Query q) { - if (settings.boosts() != null) { - float boost = settings.boosts().getOrDefault(field, 1f); - q.setBoost(boost); + Float fieldBoost = settings.fieldsAndWeights().get(field); + if (fieldBoost != null) { + q.setBoost(fieldBoost); } } @@ -828,11 +782,11 @@ public class MapperQueryParser extends QueryParser { } private Collection extractMultiFields(String field) { - Collection fields = null; + Collection fields; if (field != null) { - fields = parseContext.simpleMatchToIndexNames(field); + fields = context.simpleMatchToIndexNames(field); } else { - fields = settings.fields(); + fields = settings.fieldsAndWeights().keySet(); } return fields; } diff --git a/core/src/main/java/org/apache/lucene/queryparser/classic/MissingFieldQueryExtension.java b/core/src/main/java/org/apache/lucene/queryparser/classic/MissingFieldQueryExtension.java index ed1b7043bc3..f9fc8c9d5dc 100644 --- a/core/src/main/java/org/apache/lucene/queryparser/classic/MissingFieldQueryExtension.java +++ b/core/src/main/java/org/apache/lucene/queryparser/classic/MissingFieldQueryExtension.java @@ -21,8 +21,8 @@ package org.apache.lucene.queryparser.classic; import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.Query; -import org.elasticsearch.index.query.MissingQueryParser; -import org.elasticsearch.index.query.QueryParseContext; +import org.elasticsearch.index.query.MissingQueryBuilder; 
+import org.elasticsearch.index.query.QueryShardContext; /** * @@ -32,8 +32,11 @@ public class MissingFieldQueryExtension implements FieldQueryExtension { public static final String NAME = "_missing_"; @Override - public Query query(QueryParseContext parseContext, String queryText) { - return new ConstantScoreQuery(MissingQueryParser.newFilter(parseContext, queryText, - MissingQueryParser.DEFAULT_EXISTENCE_VALUE, MissingQueryParser.DEFAULT_NULL_VALUE, null)); + public Query query(QueryShardContext context, String queryText) { + Query query = MissingQueryBuilder.newFilter(context, queryText, MissingQueryBuilder.DEFAULT_EXISTENCE_VALUE, MissingQueryBuilder.DEFAULT_NULL_VALUE); + if (query != null) { + return new ConstantScoreQuery(query); + } + return null; } } diff --git a/core/src/main/java/org/apache/lucene/queryparser/classic/QueryParserSettings.java b/core/src/main/java/org/apache/lucene/queryparser/classic/QueryParserSettings.java index 76e8b4fccd9..c1fc2ae556e 100644 --- a/core/src/main/java/org/apache/lucene/queryparser/classic/QueryParserSettings.java +++ b/core/src/main/java/org/apache/lucene/queryparser/classic/QueryParserSettings.java @@ -19,66 +19,74 @@ package org.apache.lucene.queryparser.classic; -import com.carrotsearch.hppc.ObjectFloatHashMap; import org.apache.lucene.analysis.Analyzer; -import org.apache.lucene.search.FuzzyQuery; import org.apache.lucene.search.MultiTermQuery; -import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.common.unit.Fuzziness; import org.joda.time.DateTimeZone; -import java.util.List; import java.util.Locale; +import java.util.Map; /** - * + * Encapsulates settings that affect query_string parsing via {@link MapperQueryParser} */ public class QueryParserSettings { - public static final boolean DEFAULT_ALLOW_LEADING_WILDCARD = true; - public static final boolean DEFAULT_ANALYZE_WILDCARD = false; - public static final float DEFAULT_BOOST = 1.f; + private final String queryString; - private String 
queryString; private String defaultField; - private float boost = DEFAULT_BOOST; - private MapperQueryParser.Operator defaultOperator = QueryParser.Operator.OR; - private boolean autoGeneratePhraseQueries = false; - private boolean allowLeadingWildcard = DEFAULT_ALLOW_LEADING_WILDCARD; - private boolean lowercaseExpandedTerms = true; - private boolean enablePositionIncrements = true; - private int phraseSlop = 0; - private Fuzziness fuzziness = Fuzziness.AUTO; - private int fuzzyPrefixLength = FuzzyQuery.defaultPrefixLength; - private int fuzzyMaxExpansions = FuzzyQuery.defaultMaxExpansions; - private int maxDeterminizedStates = Operations.DEFAULT_MAX_DETERMINIZED_STATES; - private MultiTermQuery.RewriteMethod fuzzyRewriteMethod = null; - private boolean analyzeWildcard = DEFAULT_ANALYZE_WILDCARD; - private boolean escape = false; - private Analyzer defaultAnalyzer = null; - private Analyzer defaultQuoteAnalyzer = null; - private Analyzer forcedAnalyzer = null; - private Analyzer forcedQuoteAnalyzer = null; - private String quoteFieldSuffix = null; - private MultiTermQuery.RewriteMethod rewriteMethod = MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE; - private String minimumShouldMatch; - private boolean lenient; + + private Map fieldsAndWeights; + + private QueryParser.Operator defaultOperator; + + private Analyzer analyzer; + private boolean forceAnalyzer; + private Analyzer quoteAnalyzer; + private boolean forceQuoteAnalyzer; + + private String quoteFieldSuffix; + + private boolean autoGeneratePhraseQueries; + + private boolean allowLeadingWildcard; + + private boolean analyzeWildcard; + + private boolean lowercaseExpandedTerms; + + private boolean enablePositionIncrements; + private Locale locale; + + private Fuzziness fuzziness; + private int fuzzyPrefixLength; + private int fuzzyMaxExpansions; + private MultiTermQuery.RewriteMethod fuzzyRewriteMethod; + + private int phraseSlop; + + private boolean useDisMax; + + private float tieBreaker; + + private 
MultiTermQuery.RewriteMethod rewriteMethod; + + private boolean lenient; + private DateTimeZone timeZone; - List fields = null; - ObjectFloatHashMap boosts = null; - float tieBreaker = 0.0f; - boolean useDisMax = true; + /** To limit effort spent determinizing regexp queries. */ + private int maxDeterminizedStates; + + public QueryParserSettings(String queryString) { + this.queryString = queryString; + } public String queryString() { return queryString; } - public void queryString(String queryString) { - this.queryString = queryString; - } - public String defaultField() { return defaultField; } @@ -87,12 +95,12 @@ public class QueryParserSettings { this.defaultField = defaultField; } - public float boost() { - return boost; + public Map fieldsAndWeights() { + return fieldsAndWeights; } - public void boost(float boost) { - this.boost = boost; + public void fieldsAndWeights(Map fieldsAndWeights) { + this.fieldsAndWeights = fieldsAndWeights; } public QueryParser.Operator defaultOperator() { @@ -175,44 +183,40 @@ public class QueryParserSettings { this.fuzzyRewriteMethod = fuzzyRewriteMethod; } - public boolean escape() { - return escape; + public void defaultAnalyzer(Analyzer analyzer) { + this.analyzer = analyzer; + this.forceAnalyzer = false; } - public void escape(boolean escape) { - this.escape = escape; + public void forceAnalyzer(Analyzer analyzer) { + this.analyzer = analyzer; + this.forceAnalyzer = true; } - public Analyzer defaultAnalyzer() { - return defaultAnalyzer; + public Analyzer analyzer() { + return analyzer; } - public void defaultAnalyzer(Analyzer defaultAnalyzer) { - this.defaultAnalyzer = defaultAnalyzer; + public boolean forceAnalyzer() { + return forceAnalyzer; } - public Analyzer defaultQuoteAnalyzer() { - return defaultQuoteAnalyzer; + public void defaultQuoteAnalyzer(Analyzer quoteAnalyzer) { + this.quoteAnalyzer = quoteAnalyzer; + this.forceQuoteAnalyzer = false; } - public void defaultQuoteAnalyzer(Analyzer defaultAnalyzer) { - 
this.defaultQuoteAnalyzer = defaultAnalyzer; + public void forceQuoteAnalyzer(Analyzer quoteAnalyzer) { + this.quoteAnalyzer = quoteAnalyzer; + this.forceQuoteAnalyzer = true; } - public Analyzer forcedAnalyzer() { - return forcedAnalyzer; + public Analyzer quoteAnalyzer() { + return quoteAnalyzer; } - public void forcedAnalyzer(Analyzer forcedAnalyzer) { - this.forcedAnalyzer = forcedAnalyzer; - } - - public Analyzer forcedQuoteAnalyzer() { - return forcedQuoteAnalyzer; - } - - public void forcedQuoteAnalyzer(Analyzer forcedAnalyzer) { - this.forcedQuoteAnalyzer = forcedAnalyzer; + public boolean forceQuoteAnalyzer() { + return forceQuoteAnalyzer; } public boolean analyzeWildcard() { @@ -231,14 +235,6 @@ public class QueryParserSettings { this.rewriteMethod = rewriteMethod; } - public String minimumShouldMatch() { - return this.minimumShouldMatch; - } - - public void minimumShouldMatch(String minimumShouldMatch) { - this.minimumShouldMatch = minimumShouldMatch; - } - public void quoteFieldSuffix(String quoteFieldSuffix) { this.quoteFieldSuffix = quoteFieldSuffix; } @@ -255,22 +251,6 @@ public class QueryParserSettings { return this.lenient; } - public List fields() { - return fields; - } - - public void fields(List fields) { - this.fields = fields; - } - - public ObjectFloatHashMap boosts() { - return boosts; - } - - public void boosts(ObjectFloatHashMap boosts) { - this.boosts = boosts; - } - public float tieBreaker() { return tieBreaker; } @@ -303,97 +283,11 @@ public class QueryParserSettings { return this.timeZone; } - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - QueryParserSettings that = (QueryParserSettings) o; - - if (autoGeneratePhraseQueries != that.autoGeneratePhraseQueries()) return false; - if (maxDeterminizedStates != that.maxDeterminizedStates()) return false; - if (allowLeadingWildcard != that.allowLeadingWildcard) return false; - if 
(Float.compare(that.boost, boost) != 0) return false; - if (enablePositionIncrements != that.enablePositionIncrements) return false; - if (escape != that.escape) return false; - if (analyzeWildcard != that.analyzeWildcard) return false; - if (fuzziness != null ? fuzziness.equals(that.fuzziness) == false : fuzziness != null) return false; - if (fuzzyPrefixLength != that.fuzzyPrefixLength) return false; - if (fuzzyMaxExpansions != that.fuzzyMaxExpansions) return false; - if (fuzzyRewriteMethod != null ? !fuzzyRewriteMethod.equals(that.fuzzyRewriteMethod) : that.fuzzyRewriteMethod != null) - return false; - if (lowercaseExpandedTerms != that.lowercaseExpandedTerms) return false; - if (phraseSlop != that.phraseSlop) return false; - if (defaultAnalyzer != null ? !defaultAnalyzer.equals(that.defaultAnalyzer) : that.defaultAnalyzer != null) - return false; - if (defaultQuoteAnalyzer != null ? !defaultQuoteAnalyzer.equals(that.defaultQuoteAnalyzer) : that.defaultQuoteAnalyzer != null) - return false; - if (forcedAnalyzer != null ? !forcedAnalyzer.equals(that.forcedAnalyzer) : that.forcedAnalyzer != null) - return false; - if (forcedQuoteAnalyzer != null ? !forcedQuoteAnalyzer.equals(that.forcedQuoteAnalyzer) : that.forcedQuoteAnalyzer != null) - return false; - if (defaultField != null ? !defaultField.equals(that.defaultField) : that.defaultField != null) return false; - if (defaultOperator != that.defaultOperator) return false; - if (queryString != null ? !queryString.equals(that.queryString) : that.queryString != null) return false; - if (rewriteMethod != null ? !rewriteMethod.equals(that.rewriteMethod) : that.rewriteMethod != null) - return false; - if (minimumShouldMatch != null ? !minimumShouldMatch.equals(that.minimumShouldMatch) : that.minimumShouldMatch != null) - return false; - if (quoteFieldSuffix != null ? 
!quoteFieldSuffix.equals(that.quoteFieldSuffix) : that.quoteFieldSuffix != null) - return false; - if (lenient != that.lenient) { - return false; - } - if (locale != null ? !locale.equals(that.locale) : that.locale != null) { - return false; - } - if (timeZone != null ? !timeZone.equals(that.timeZone) : that.timeZone != null) { - return false; - } - - if (Float.compare(that.tieBreaker, tieBreaker) != 0) return false; - if (useDisMax != that.useDisMax) return false; - if (boosts != null ? !boosts.equals(that.boosts) : that.boosts != null) return false; - if (fields != null ? !fields.equals(that.fields) : that.fields != null) return false; - - return true; - } - - @Override - public int hashCode() { - int result = queryString != null ? queryString.hashCode() : 0; - result = 31 * result + (defaultField != null ? defaultField.hashCode() : 0); - result = 31 * result + (boost != +0.0f ? Float.floatToIntBits(boost) : 0); - result = 31 * result + (defaultOperator != null ? defaultOperator.hashCode() : 0); - result = 31 * result + (autoGeneratePhraseQueries ? 1 : 0); - result = 31 * result + maxDeterminizedStates; - result = 31 * result + (allowLeadingWildcard ? 1 : 0); - result = 31 * result + (lowercaseExpandedTerms ? 1 : 0); - result = 31 * result + (enablePositionIncrements ? 1 : 0); - result = 31 * result + phraseSlop; - result = 31 * result + (fuzziness.hashCode()); - result = 31 * result + fuzzyPrefixLength; - result = 31 * result + (escape ? 1 : 0); - result = 31 * result + (defaultAnalyzer != null ? defaultAnalyzer.hashCode() : 0); - result = 31 * result + (defaultQuoteAnalyzer != null ? defaultQuoteAnalyzer.hashCode() : 0); - result = 31 * result + (forcedAnalyzer != null ? forcedAnalyzer.hashCode() : 0); - result = 31 * result + (forcedQuoteAnalyzer != null ? forcedQuoteAnalyzer.hashCode() : 0); - result = 31 * result + (analyzeWildcard ? 1 : 0); - - result = 31 * result + (fields != null ? fields.hashCode() : 0); - result = 31 * result + (boosts != null ? 
boosts.hashCode() : 0); - result = 31 * result + (tieBreaker != +0.0f ? Float.floatToIntBits(tieBreaker) : 0); - result = 31 * result + (useDisMax ? 1 : 0); - result = 31 * result + (locale != null ? locale.hashCode() : 0); - result = 31 * result + (timeZone != null ? timeZone.hashCode() : 0); - return result; - } - - public void setFuzziness(Fuzziness fuzziness) { + public void fuzziness(Fuzziness fuzziness) { this.fuzziness = fuzziness; } - public Fuzziness getFuzziness() { + public Fuzziness fuzziness() { return fuzziness; } } diff --git a/core/src/main/java/org/elasticsearch/ElasticsearchException.java b/core/src/main/java/org/elasticsearch/ElasticsearchException.java index c95b907098e..c82cab9a35f 100644 --- a/core/src/main/java/org/elasticsearch/ElasticsearchException.java +++ b/core/src/main/java/org/elasticsearch/ElasticsearchException.java @@ -463,153 +463,151 @@ public class ElasticsearchException extends RuntimeException implements ToXConte // change due to refactorings etc. like renaming we have to keep the ordinal <--> class mapping // to deserialize the exception coming from another node or from an corruption marker on // a corrupted index. 
+ // NOTE: ONLY APPEND TO THE END and NEVER REMOVE EXCEPTIONS IN MINOR VERSIONS final Map, Integer> exceptions = new HashMap<>(); - exceptions.put(org.elasticsearch.index.snapshots.IndexShardSnapshotFailedException.class, 0); - exceptions.put(org.elasticsearch.search.dfs.DfsPhaseExecutionException.class, 1); - exceptions.put(org.elasticsearch.common.util.CancellableThreads.ExecutionCancelledException.class, 2); - exceptions.put(org.elasticsearch.discovery.MasterNotDiscoveredException.class, 3); - exceptions.put(org.elasticsearch.ElasticsearchSecurityException.class, 4); - exceptions.put(org.elasticsearch.index.snapshots.IndexShardRestoreException.class, 5); - exceptions.put(org.elasticsearch.indices.IndexClosedException.class, 6); - exceptions.put(org.elasticsearch.http.BindHttpException.class, 7); - exceptions.put(org.elasticsearch.action.search.ReduceSearchPhaseException.class, 8); - exceptions.put(org.elasticsearch.node.NodeClosedException.class, 9); - exceptions.put(org.elasticsearch.index.engine.SnapshotFailedEngineException.class, 10); - exceptions.put(org.elasticsearch.index.shard.ShardNotFoundException.class, 11); - exceptions.put(org.elasticsearch.transport.ConnectTransportException.class, 12); - exceptions.put(org.elasticsearch.transport.NotSerializableTransportException.class, 13); - exceptions.put(org.elasticsearch.transport.ResponseHandlerFailureTransportException.class, 14); - exceptions.put(org.elasticsearch.indices.IndexCreationException.class, 15); - exceptions.put(org.elasticsearch.index.IndexNotFoundException.class, 16); - exceptions.put(org.elasticsearch.cluster.routing.IllegalShardRoutingStateException.class, 17); - exceptions.put(org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException.class, 18); - exceptions.put(org.elasticsearch.ResourceNotFoundException.class, 19); - exceptions.put(org.elasticsearch.transport.ActionTransportException.class, 20); - 
exceptions.put(org.elasticsearch.ElasticsearchGenerationException.class, 21); - exceptions.put(org.elasticsearch.index.engine.CreateFailedEngineException.class, 22); - exceptions.put(org.elasticsearch.index.shard.IndexShardStartedException.class, 23); - exceptions.put(org.elasticsearch.search.SearchContextMissingException.class, 24); - exceptions.put(org.elasticsearch.script.ScriptException.class, 25); - exceptions.put(org.elasticsearch.index.shard.TranslogRecoveryPerformer.BatchOperationException.class, 26); - exceptions.put(org.elasticsearch.snapshots.SnapshotCreationException.class, 27); - exceptions.put(org.elasticsearch.index.engine.DeleteFailedEngineException.class, 28); - exceptions.put(org.elasticsearch.index.engine.DocumentMissingException.class, 29); - exceptions.put(org.elasticsearch.snapshots.SnapshotException.class, 30); - exceptions.put(org.elasticsearch.indices.InvalidAliasNameException.class, 31); - exceptions.put(org.elasticsearch.indices.InvalidIndexNameException.class, 32); - exceptions.put(org.elasticsearch.indices.IndexPrimaryShardNotAllocatedException.class, 33); - exceptions.put(org.elasticsearch.transport.TransportException.class, 34); - exceptions.put(org.elasticsearch.ElasticsearchParseException.class, 35); - exceptions.put(org.elasticsearch.search.SearchException.class, 36); - exceptions.put(org.elasticsearch.index.mapper.MapperException.class, 37); - exceptions.put(org.elasticsearch.indices.InvalidTypeNameException.class, 38); - exceptions.put(org.elasticsearch.snapshots.SnapshotRestoreException.class, 39); - exceptions.put(org.elasticsearch.common.ParsingException.class, 40); - exceptions.put(org.elasticsearch.index.shard.IndexShardClosedException.class, 41); - exceptions.put(org.elasticsearch.script.expression.ExpressionScriptCompilationException.class, 42); - exceptions.put(org.elasticsearch.indices.recovery.RecoverFilesRecoveryException.class, 43); - exceptions.put(org.elasticsearch.index.translog.TruncatedTranslogException.class, 
44); - exceptions.put(org.elasticsearch.indices.recovery.RecoveryFailedException.class, 45); - exceptions.put(org.elasticsearch.index.shard.IndexShardRelocatedException.class, 46); - exceptions.put(org.elasticsearch.transport.NodeShouldNotConnectException.class, 47); - exceptions.put(org.elasticsearch.indices.IndexTemplateAlreadyExistsException.class, 48); - exceptions.put(org.elasticsearch.index.translog.TranslogCorruptedException.class, 49); - exceptions.put(org.elasticsearch.cluster.block.ClusterBlockException.class, 50); - exceptions.put(org.elasticsearch.search.fetch.FetchPhaseExecutionException.class, 51); - exceptions.put(org.elasticsearch.index.IndexShardAlreadyExistsException.class, 52); - exceptions.put(org.elasticsearch.index.engine.VersionConflictEngineException.class, 53); - exceptions.put(org.elasticsearch.index.engine.EngineException.class, 54); - exceptions.put(org.elasticsearch.index.engine.DocumentAlreadyExistsException.class, 55); - exceptions.put(org.elasticsearch.action.NoSuchNodeException.class, 56); - exceptions.put(org.elasticsearch.common.settings.SettingsException.class, 57); - exceptions.put(org.elasticsearch.indices.IndexTemplateMissingException.class, 58); - exceptions.put(org.elasticsearch.transport.SendRequestTransportException.class, 59); - exceptions.put(org.elasticsearch.common.util.concurrent.EsRejectedExecutionException.class, 60); - exceptions.put(org.elasticsearch.common.lucene.Lucene.EarlyTerminationException.class, 61); - exceptions.put(org.elasticsearch.cluster.routing.RoutingValidationException.class, 62); - exceptions.put(org.elasticsearch.common.io.stream.NotSerializableExceptionWrapper.class, 63); - exceptions.put(org.elasticsearch.indices.AliasFilterParsingException.class, 64); - exceptions.put(org.elasticsearch.index.engine.DeleteByQueryFailedEngineException.class, 65); - exceptions.put(org.elasticsearch.gateway.GatewayException.class, 66); - 
exceptions.put(org.elasticsearch.index.shard.IndexShardNotRecoveringException.class, 67); - exceptions.put(org.elasticsearch.http.HttpException.class, 68); - exceptions.put(org.elasticsearch.ElasticsearchException.class, 69); - exceptions.put(org.elasticsearch.snapshots.SnapshotMissingException.class, 70); - exceptions.put(org.elasticsearch.action.PrimaryMissingActionException.class, 71); - exceptions.put(org.elasticsearch.action.FailedNodeException.class, 72); - exceptions.put(org.elasticsearch.search.SearchParseException.class, 73); - exceptions.put(org.elasticsearch.snapshots.ConcurrentSnapshotExecutionException.class, 74); - exceptions.put(org.elasticsearch.common.blobstore.BlobStoreException.class, 75); - exceptions.put(org.elasticsearch.cluster.IncompatibleClusterStateVersionException.class, 76); - exceptions.put(org.elasticsearch.index.engine.RecoveryEngineException.class, 77); - exceptions.put(org.elasticsearch.common.util.concurrent.UncategorizedExecutionException.class, 78); - exceptions.put(org.elasticsearch.action.TimestampParsingException.class, 79); - exceptions.put(org.elasticsearch.action.RoutingMissingException.class, 80); - exceptions.put(org.elasticsearch.index.engine.IndexFailedEngineException.class, 81); - exceptions.put(org.elasticsearch.index.snapshots.IndexShardRestoreFailedException.class, 82); - exceptions.put(org.elasticsearch.repositories.RepositoryException.class, 83); - exceptions.put(org.elasticsearch.transport.ReceiveTimeoutTransportException.class, 84); - exceptions.put(org.elasticsearch.transport.NodeDisconnectedException.class, 85); - exceptions.put(org.elasticsearch.index.AlreadyExpiredException.class, 86); - exceptions.put(org.elasticsearch.search.aggregations.AggregationExecutionException.class, 87); - exceptions.put(org.elasticsearch.index.mapper.MergeMappingException.class, 88); - exceptions.put(org.elasticsearch.indices.InvalidIndexTemplateException.class, 89); - 
exceptions.put(org.elasticsearch.percolator.PercolateException.class, 90); - exceptions.put(org.elasticsearch.index.engine.RefreshFailedEngineException.class, 91); - exceptions.put(org.elasticsearch.search.aggregations.AggregationInitializationException.class, 92); - exceptions.put(org.elasticsearch.indices.recovery.DelayRecoveryException.class, 93); - exceptions.put(org.elasticsearch.search.warmer.IndexWarmerMissingException.class, 94); - exceptions.put(org.elasticsearch.client.transport.NoNodeAvailableException.class, 95); - exceptions.put(org.elasticsearch.script.groovy.GroovyScriptCompilationException.class, 96); - exceptions.put(org.elasticsearch.snapshots.InvalidSnapshotNameException.class, 97); - exceptions.put(org.elasticsearch.index.shard.IllegalIndexShardStateException.class, 98); - exceptions.put(org.elasticsearch.index.snapshots.IndexShardSnapshotException.class, 99); - exceptions.put(org.elasticsearch.index.shard.IndexShardNotStartedException.class, 100); - exceptions.put(org.elasticsearch.action.search.SearchPhaseExecutionException.class, 101); - exceptions.put(org.elasticsearch.transport.ActionNotFoundTransportException.class, 102); - exceptions.put(org.elasticsearch.transport.TransportSerializationException.class, 103); - exceptions.put(org.elasticsearch.transport.RemoteTransportException.class, 104); - exceptions.put(org.elasticsearch.index.engine.EngineCreationFailureException.class, 105); - exceptions.put(org.elasticsearch.cluster.routing.RoutingException.class, 106); - exceptions.put(org.elasticsearch.index.shard.IndexShardRecoveryException.class, 107); - exceptions.put(org.elasticsearch.repositories.RepositoryMissingException.class, 108); - exceptions.put(org.elasticsearch.script.expression.ExpressionScriptExecutionException.class, 109); - exceptions.put(org.elasticsearch.index.percolator.PercolatorException.class, 110); - exceptions.put(org.elasticsearch.index.engine.DocumentSourceMissingException.class, 111); - 
exceptions.put(org.elasticsearch.index.engine.FlushNotAllowedEngineException.class, 112); - exceptions.put(org.elasticsearch.common.settings.NoClassSettingsException.class, 113); - exceptions.put(org.elasticsearch.transport.BindTransportException.class, 114); - exceptions.put(org.elasticsearch.rest.action.admin.indices.alias.delete.AliasesNotFoundException.class, 115); - exceptions.put(org.elasticsearch.index.shard.IndexShardRecoveringException.class, 116); - exceptions.put(org.elasticsearch.index.translog.TranslogException.class, 117); - exceptions.put(org.elasticsearch.cluster.metadata.ProcessClusterEventTimeoutException.class, 118); - exceptions.put(org.elasticsearch.action.support.replication.TransportReplicationAction.RetryOnPrimaryException.class, 119); - exceptions.put(org.elasticsearch.ElasticsearchTimeoutException.class, 120); - exceptions.put(org.elasticsearch.search.query.QueryPhaseExecutionException.class, 121); - exceptions.put(org.elasticsearch.repositories.RepositoryVerificationException.class, 122); - exceptions.put(org.elasticsearch.search.aggregations.InvalidAggregationPathException.class, 123); - exceptions.put(org.elasticsearch.script.groovy.GroovyScriptExecutionException.class, 124); - exceptions.put(org.elasticsearch.indices.IndexAlreadyExistsException.class, 125); - exceptions.put(org.elasticsearch.script.Script.ScriptParseException.class, 126); - exceptions.put(org.elasticsearch.transport.netty.SizeHeaderFrameDecoder.HttpOnTransportException.class, 127); - exceptions.put(org.elasticsearch.index.mapper.MapperParsingException.class, 128); - exceptions.put(org.elasticsearch.search.SearchContextException.class, 129); - exceptions.put(org.elasticsearch.search.builder.SearchSourceBuilderException.class, 130); - exceptions.put(org.elasticsearch.index.engine.EngineClosedException.class, 131); - exceptions.put(org.elasticsearch.action.NoShardAvailableActionException.class, 132); - 
exceptions.put(org.elasticsearch.action.UnavailableShardsException.class, 133); - exceptions.put(org.elasticsearch.index.engine.FlushFailedEngineException.class, 134); - exceptions.put(org.elasticsearch.common.breaker.CircuitBreakingException.class, 135); - exceptions.put(org.elasticsearch.transport.NodeNotConnectedException.class, 136); - exceptions.put(org.elasticsearch.index.mapper.StrictDynamicMappingException.class, 137); - exceptions.put(org.elasticsearch.action.support.replication.TransportReplicationAction.RetryOnReplicaException.class, 138); - exceptions.put(org.elasticsearch.indices.TypeMissingException.class, 139); + exceptions.put(org.elasticsearch.index.snapshots.IndexShardSnapshotFailedException.class, exceptions.size()); + exceptions.put(org.elasticsearch.search.dfs.DfsPhaseExecutionException.class, exceptions.size()); + exceptions.put(org.elasticsearch.common.util.CancellableThreads.ExecutionCancelledException.class, exceptions.size()); + exceptions.put(org.elasticsearch.discovery.MasterNotDiscoveredException.class, exceptions.size()); + exceptions.put(org.elasticsearch.ElasticsearchSecurityException.class, exceptions.size()); + exceptions.put(org.elasticsearch.index.snapshots.IndexShardRestoreException.class, exceptions.size()); + exceptions.put(org.elasticsearch.indices.IndexClosedException.class, exceptions.size()); + exceptions.put(org.elasticsearch.http.BindHttpException.class, exceptions.size()); + exceptions.put(org.elasticsearch.action.search.ReduceSearchPhaseException.class, exceptions.size()); + exceptions.put(org.elasticsearch.node.NodeClosedException.class, exceptions.size()); + exceptions.put(org.elasticsearch.index.engine.SnapshotFailedEngineException.class, exceptions.size()); + exceptions.put(org.elasticsearch.index.shard.ShardNotFoundException.class, exceptions.size()); + exceptions.put(org.elasticsearch.transport.ConnectTransportException.class, exceptions.size()); + 
exceptions.put(org.elasticsearch.transport.NotSerializableTransportException.class, exceptions.size()); + exceptions.put(org.elasticsearch.transport.ResponseHandlerFailureTransportException.class, exceptions.size()); + exceptions.put(org.elasticsearch.indices.IndexCreationException.class, exceptions.size()); + exceptions.put(org.elasticsearch.index.IndexNotFoundException.class, exceptions.size()); + exceptions.put(org.elasticsearch.cluster.routing.IllegalShardRoutingStateException.class, exceptions.size()); + exceptions.put(org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException.class, exceptions.size()); + exceptions.put(org.elasticsearch.ResourceNotFoundException.class, exceptions.size()); + exceptions.put(org.elasticsearch.transport.ActionTransportException.class, exceptions.size()); + exceptions.put(org.elasticsearch.ElasticsearchGenerationException.class, exceptions.size()); + exceptions.put(org.elasticsearch.index.engine.CreateFailedEngineException.class, exceptions.size()); + exceptions.put(org.elasticsearch.index.shard.IndexShardStartedException.class, exceptions.size()); + exceptions.put(org.elasticsearch.search.SearchContextMissingException.class, exceptions.size()); + exceptions.put(org.elasticsearch.script.ScriptException.class, exceptions.size()); + exceptions.put(org.elasticsearch.index.shard.TranslogRecoveryPerformer.BatchOperationException.class, exceptions.size()); + exceptions.put(org.elasticsearch.snapshots.SnapshotCreationException.class, exceptions.size()); + exceptions.put(org.elasticsearch.index.engine.DeleteFailedEngineException.class, exceptions.size()); + exceptions.put(org.elasticsearch.index.engine.DocumentMissingException.class, exceptions.size()); + exceptions.put(org.elasticsearch.snapshots.SnapshotException.class, exceptions.size()); + exceptions.put(org.elasticsearch.indices.InvalidAliasNameException.class, exceptions.size()); + exceptions.put(org.elasticsearch.indices.InvalidIndexNameException.class, 
exceptions.size()); + exceptions.put(org.elasticsearch.indices.IndexPrimaryShardNotAllocatedException.class, exceptions.size()); + exceptions.put(org.elasticsearch.transport.TransportException.class, exceptions.size()); + exceptions.put(org.elasticsearch.ElasticsearchParseException.class, exceptions.size()); + exceptions.put(org.elasticsearch.search.SearchException.class, exceptions.size()); + exceptions.put(org.elasticsearch.index.mapper.MapperException.class, exceptions.size()); + exceptions.put(org.elasticsearch.indices.InvalidTypeNameException.class, exceptions.size()); + exceptions.put(org.elasticsearch.snapshots.SnapshotRestoreException.class, exceptions.size()); + exceptions.put(org.elasticsearch.common.ParsingException.class, exceptions.size()); + exceptions.put(org.elasticsearch.index.shard.IndexShardClosedException.class, exceptions.size()); + exceptions.put(org.elasticsearch.indices.recovery.RecoverFilesRecoveryException.class, exceptions.size()); + exceptions.put(org.elasticsearch.index.translog.TruncatedTranslogException.class, exceptions.size()); + exceptions.put(org.elasticsearch.indices.recovery.RecoveryFailedException.class, exceptions.size()); + exceptions.put(org.elasticsearch.index.shard.IndexShardRelocatedException.class, exceptions.size()); + exceptions.put(org.elasticsearch.transport.NodeShouldNotConnectException.class, exceptions.size()); + exceptions.put(org.elasticsearch.indices.IndexTemplateAlreadyExistsException.class, exceptions.size()); + exceptions.put(org.elasticsearch.index.translog.TranslogCorruptedException.class, exceptions.size()); + exceptions.put(org.elasticsearch.cluster.block.ClusterBlockException.class, exceptions.size()); + exceptions.put(org.elasticsearch.search.fetch.FetchPhaseExecutionException.class, exceptions.size()); + exceptions.put(org.elasticsearch.index.IndexShardAlreadyExistsException.class, exceptions.size()); + exceptions.put(org.elasticsearch.index.engine.VersionConflictEngineException.class, 
exceptions.size()); + exceptions.put(org.elasticsearch.index.engine.EngineException.class, exceptions.size()); + exceptions.put(org.elasticsearch.index.engine.DocumentAlreadyExistsException.class, exceptions.size()); + exceptions.put(org.elasticsearch.action.NoSuchNodeException.class, exceptions.size()); + exceptions.put(org.elasticsearch.common.settings.SettingsException.class, exceptions.size()); + exceptions.put(org.elasticsearch.indices.IndexTemplateMissingException.class, exceptions.size()); + exceptions.put(org.elasticsearch.transport.SendRequestTransportException.class, exceptions.size()); + exceptions.put(org.elasticsearch.common.util.concurrent.EsRejectedExecutionException.class, exceptions.size()); + exceptions.put(org.elasticsearch.common.lucene.Lucene.EarlyTerminationException.class, exceptions.size()); + exceptions.put(org.elasticsearch.cluster.routing.RoutingValidationException.class, exceptions.size()); + exceptions.put(org.elasticsearch.common.io.stream.NotSerializableExceptionWrapper.class, exceptions.size()); + exceptions.put(org.elasticsearch.indices.AliasFilterParsingException.class, exceptions.size()); + exceptions.put(org.elasticsearch.index.engine.DeleteByQueryFailedEngineException.class, exceptions.size()); + exceptions.put(org.elasticsearch.gateway.GatewayException.class, exceptions.size()); + exceptions.put(org.elasticsearch.index.shard.IndexShardNotRecoveringException.class, exceptions.size()); + exceptions.put(org.elasticsearch.http.HttpException.class, exceptions.size()); + exceptions.put(org.elasticsearch.ElasticsearchException.class, exceptions.size()); + exceptions.put(org.elasticsearch.snapshots.SnapshotMissingException.class, exceptions.size()); + exceptions.put(org.elasticsearch.action.PrimaryMissingActionException.class, exceptions.size()); + exceptions.put(org.elasticsearch.action.FailedNodeException.class, exceptions.size()); + exceptions.put(org.elasticsearch.search.SearchParseException.class, exceptions.size()); + 
exceptions.put(org.elasticsearch.snapshots.ConcurrentSnapshotExecutionException.class, exceptions.size()); + exceptions.put(org.elasticsearch.common.blobstore.BlobStoreException.class, exceptions.size()); + exceptions.put(org.elasticsearch.cluster.IncompatibleClusterStateVersionException.class, exceptions.size()); + exceptions.put(org.elasticsearch.index.engine.RecoveryEngineException.class, exceptions.size()); + exceptions.put(org.elasticsearch.common.util.concurrent.UncategorizedExecutionException.class, exceptions.size()); + exceptions.put(org.elasticsearch.action.TimestampParsingException.class, exceptions.size()); + exceptions.put(org.elasticsearch.action.RoutingMissingException.class, exceptions.size()); + exceptions.put(org.elasticsearch.index.engine.IndexFailedEngineException.class, exceptions.size()); + exceptions.put(org.elasticsearch.index.snapshots.IndexShardRestoreFailedException.class, exceptions.size()); + exceptions.put(org.elasticsearch.repositories.RepositoryException.class, exceptions.size()); + exceptions.put(org.elasticsearch.transport.ReceiveTimeoutTransportException.class, exceptions.size()); + exceptions.put(org.elasticsearch.transport.NodeDisconnectedException.class, exceptions.size()); + exceptions.put(org.elasticsearch.index.AlreadyExpiredException.class, exceptions.size()); + exceptions.put(org.elasticsearch.search.aggregations.AggregationExecutionException.class, exceptions.size()); + exceptions.put(org.elasticsearch.index.mapper.MergeMappingException.class, exceptions.size()); + exceptions.put(org.elasticsearch.indices.InvalidIndexTemplateException.class, exceptions.size()); + exceptions.put(org.elasticsearch.percolator.PercolateException.class, exceptions.size()); + exceptions.put(org.elasticsearch.index.engine.RefreshFailedEngineException.class, exceptions.size()); + exceptions.put(org.elasticsearch.search.aggregations.AggregationInitializationException.class, exceptions.size()); + 
exceptions.put(org.elasticsearch.indices.recovery.DelayRecoveryException.class, exceptions.size()); + exceptions.put(org.elasticsearch.search.warmer.IndexWarmerMissingException.class, exceptions.size()); + exceptions.put(org.elasticsearch.client.transport.NoNodeAvailableException.class, exceptions.size()); + exceptions.put(org.elasticsearch.script.groovy.GroovyScriptCompilationException.class, exceptions.size()); + exceptions.put(org.elasticsearch.snapshots.InvalidSnapshotNameException.class, exceptions.size()); + exceptions.put(org.elasticsearch.index.shard.IllegalIndexShardStateException.class, exceptions.size()); + exceptions.put(org.elasticsearch.index.snapshots.IndexShardSnapshotException.class, exceptions.size()); + exceptions.put(org.elasticsearch.index.shard.IndexShardNotStartedException.class, exceptions.size()); + exceptions.put(org.elasticsearch.action.search.SearchPhaseExecutionException.class, exceptions.size()); + exceptions.put(org.elasticsearch.transport.ActionNotFoundTransportException.class, exceptions.size()); + exceptions.put(org.elasticsearch.transport.TransportSerializationException.class, exceptions.size()); + exceptions.put(org.elasticsearch.transport.RemoteTransportException.class, exceptions.size()); + exceptions.put(org.elasticsearch.index.engine.EngineCreationFailureException.class, exceptions.size()); + exceptions.put(org.elasticsearch.cluster.routing.RoutingException.class, exceptions.size()); + exceptions.put(org.elasticsearch.index.shard.IndexShardRecoveryException.class, exceptions.size()); + exceptions.put(org.elasticsearch.repositories.RepositoryMissingException.class, exceptions.size()); + exceptions.put(org.elasticsearch.index.percolator.PercolatorException.class, exceptions.size()); + exceptions.put(org.elasticsearch.index.engine.DocumentSourceMissingException.class, exceptions.size()); + exceptions.put(org.elasticsearch.index.engine.FlushNotAllowedEngineException.class, exceptions.size()); + 
exceptions.put(org.elasticsearch.common.settings.NoClassSettingsException.class, exceptions.size()); + exceptions.put(org.elasticsearch.transport.BindTransportException.class, exceptions.size()); + exceptions.put(org.elasticsearch.rest.action.admin.indices.alias.delete.AliasesNotFoundException.class, exceptions.size()); + exceptions.put(org.elasticsearch.index.shard.IndexShardRecoveringException.class, exceptions.size()); + exceptions.put(org.elasticsearch.index.translog.TranslogException.class, exceptions.size()); + exceptions.put(org.elasticsearch.cluster.metadata.ProcessClusterEventTimeoutException.class, exceptions.size()); + exceptions.put(org.elasticsearch.action.support.replication.TransportReplicationAction.RetryOnPrimaryException.class, exceptions.size()); + exceptions.put(org.elasticsearch.ElasticsearchTimeoutException.class, exceptions.size()); + exceptions.put(org.elasticsearch.search.query.QueryPhaseExecutionException.class, exceptions.size()); + exceptions.put(org.elasticsearch.repositories.RepositoryVerificationException.class, exceptions.size()); + exceptions.put(org.elasticsearch.search.aggregations.InvalidAggregationPathException.class, exceptions.size()); + exceptions.put(org.elasticsearch.script.groovy.GroovyScriptExecutionException.class, exceptions.size()); + exceptions.put(org.elasticsearch.indices.IndexAlreadyExistsException.class, exceptions.size()); + exceptions.put(org.elasticsearch.script.Script.ScriptParseException.class, exceptions.size()); + exceptions.put(org.elasticsearch.transport.netty.SizeHeaderFrameDecoder.HttpOnTransportException.class, exceptions.size()); + exceptions.put(org.elasticsearch.index.mapper.MapperParsingException.class, exceptions.size()); + exceptions.put(org.elasticsearch.search.SearchContextException.class, exceptions.size()); + exceptions.put(org.elasticsearch.search.builder.SearchSourceBuilderException.class, exceptions.size()); + exceptions.put(org.elasticsearch.index.engine.EngineClosedException.class, 
exceptions.size()); + exceptions.put(org.elasticsearch.action.NoShardAvailableActionException.class, exceptions.size()); + exceptions.put(org.elasticsearch.action.UnavailableShardsException.class, exceptions.size()); + exceptions.put(org.elasticsearch.index.engine.FlushFailedEngineException.class, exceptions.size()); + exceptions.put(org.elasticsearch.common.breaker.CircuitBreakingException.class, exceptions.size()); + exceptions.put(org.elasticsearch.transport.NodeNotConnectedException.class, exceptions.size()); + exceptions.put(org.elasticsearch.index.mapper.StrictDynamicMappingException.class, exceptions.size()); + exceptions.put(org.elasticsearch.action.support.replication.TransportReplicationAction.RetryOnReplicaException.class, exceptions.size()); + exceptions.put(org.elasticsearch.indices.TypeMissingException.class, exceptions.size()); // added in 3.x - exceptions.put(org.elasticsearch.discovery.Discovery.FailedToCommitClusterStateException.class, 140); - - final int maxOrd = 140; - assert exceptions.size() == maxOrd + 1; - Constructor[] idToSupplier = new Constructor[maxOrd + 1]; + exceptions.put(org.elasticsearch.discovery.Discovery.FailedToCommitClusterStateException.class, exceptions.size()); + exceptions.put(org.elasticsearch.index.query.QueryShardException.class, exceptions.size()); + // NOTE: ONLY APPEND TO THE END and NEVER REMOVE EXCEPTIONS IN MINOR VERSIONS + Constructor[] idToSupplier = new Constructor[exceptions.size()]; for (Map.Entry, Integer> e : exceptions.entrySet()) { try { Constructor constructor = e.getKey().getDeclaredConstructor(StreamInput.class); diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/create/TransportCreateIndexAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/create/TransportCreateIndexAction.java index cfa3a435065..98a002cc2fb 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/create/TransportCreateIndexAction.java +++ 
b/core/src/main/java/org/elasticsearch/action/admin/indices/create/TransportCreateIndexAction.java @@ -73,7 +73,8 @@ public class TransportCreateIndexAction extends TransportMasterNodeAction 0) { try { - QueryParseContext.setTypes(request.types()); + QueryShardContext.setTypes(request.types()); context.parsedQuery(indexService.queryParserService().parseQuery(source)); } finally { - QueryParseContext.removeTypes(); + QueryShardContext.removeTypes(); } } context.preProcess(); diff --git a/core/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java index d6007151e1b..a57008056b4 100644 --- a/core/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java +++ b/core/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java @@ -715,8 +715,13 @@ public class SearchRequestBuilder extends ActionRequestBuilder + * This is only supported on the amd64 architecture, on Linux kernels 3.5 or above, and requires + * {@code CONFIG_SECCOMP} and {@code CONFIG_SECCOMP_FILTER} compiled into the kernel. + *

+ * Filters are installed using either {@code seccomp(2)} (3.17+) or {@code prctl(2)} (3.5+). {@code seccomp(2)} + * is preferred, as it allows filters to be applied to any existing threads in the process, and one motivation + * here is to protect against bugs in the JVM. Otherwise, code will fall back to the {@code prctl(2)} method + * which will at least protect elasticsearch application threads. + *

+ * The filters will return {@code EACCES} (Access Denied) for the following system calls: + *

+ * <ul>
+ *   <li>{@code execve}</li>
+ *   <li>{@code fork}</li>
+ *   <li>{@code vfork}</li>
+ * </ul>

+ * This is not intended as a sandbox. It is another level of security, mostly intended to annoy + * security researchers and make their lives more difficult in achieving "remote execution" exploits. + * @see + * http://www.kernel.org/doc/Documentation/prctl/seccomp_filter.txt + */ +// only supported on linux/amd64 +// not an example of how to write code!!! +final class Seccomp { + private static final ESLogger logger = Loggers.getLogger(Seccomp.class); + + /** we use an explicit interface for native methods, for varargs support */ + static interface LinuxLibrary extends Library { + /** + * maps to prctl(2) + */ + int prctl(int option, long arg2, long arg3, long arg4, long arg5); + /** + * used to call seccomp(2), its too new... + * this is the only way, DONT use it on some other architecture unless you know wtf you are doing + */ + long syscall(long number, Object... args); + }; + + // null if something goes wrong. + static final LinuxLibrary libc; + + static { + LinuxLibrary lib = null; + try { + lib = (LinuxLibrary) Native.loadLibrary("c", LinuxLibrary.class); + } catch (UnsatisfiedLinkError e) { + logger.warn("unable to link C library. 
native methods (seccomp) will be disabled.", e); + } + libc = lib; + } + + /** the preferred method is seccomp(2), since we can apply to all threads of the process */ + static final int SECCOMP_SYSCALL_NR = 317; // since Linux 3.17 + static final int SECCOMP_SET_MODE_FILTER = 1; // since Linux 3.17 + static final int SECCOMP_FILTER_FLAG_TSYNC = 1; // since Linux 3.17 + + /** otherwise, we can use prctl(2), which will at least protect ES application threads */ + static final int PR_GET_NO_NEW_PRIVS = 39; // since Linux 3.5 + static final int PR_SET_NO_NEW_PRIVS = 38; // since Linux 3.5 + static final int PR_GET_SECCOMP = 21; // since Linux 2.6.23 + static final int PR_SET_SECCOMP = 22; // since Linux 2.6.23 + static final int SECCOMP_MODE_FILTER = 2; // since Linux Linux 3.5 + + /** corresponds to struct sock_filter */ + static final class SockFilter { + short code; // insn + byte jt; // number of insn to jump (skip) if true + byte jf; // number of insn to jump (skip) if false + int k; // additional data + + SockFilter(short code, byte jt, byte jf, int k) { + this.code = code; + this.jt = jt; + this.jf = jf; + this.k = k; + } + } + + /** corresponds to struct sock_fprog */ + public static final class SockFProg extends Structure implements Structure.ByReference { + public short len; // number of filters + public Pointer filter; // filters + + public SockFProg(SockFilter filters[]) { + len = (short) filters.length; + // serialize struct sock_filter * explicitly, its less confusing than the JNA magic we would need + Memory filter = new Memory(len * 8); + ByteBuffer bbuf = filter.getByteBuffer(0, len * 8); + bbuf.order(ByteOrder.nativeOrder()); // little endian + for (SockFilter f : filters) { + bbuf.putShort(f.code); + bbuf.put(f.jt); + bbuf.put(f.jf); + bbuf.putInt(f.k); + } + this.filter = filter; + } + + @Override + protected List getFieldOrder() { + return Arrays.asList(new String[] { "len", "filter" }); + } + } + + // BPF "macros" and constants + static final int 
BPF_LD = 0x00; + static final int BPF_W = 0x00; + static final int BPF_ABS = 0x20; + static final int BPF_JMP = 0x05; + static final int BPF_JEQ = 0x10; + static final int BPF_JGE = 0x30; + static final int BPF_JGT = 0x20; + static final int BPF_RET = 0x06; + static final int BPF_K = 0x00; + + static SockFilter BPF_STMT(int code, int k) { + return new SockFilter((short) code, (byte) 0, (byte) 0, k); + } + + static SockFilter BPF_JUMP(int code, int k, int jt, int jf) { + return new SockFilter((short) code, (byte) jt, (byte) jf, k); + } + + static final int AUDIT_ARCH_X86_64 = 0xC000003E; + static final int SECCOMP_RET_ERRNO = 0x00050000; + static final int SECCOMP_RET_DATA = 0x0000FFFF; + static final int SECCOMP_RET_ALLOW = 0x7FFF0000; + + // some errno constants for error checking/handling + static final int EACCES = 0x0D; + static final int EFAULT = 0x0E; + static final int EINVAL = 0x16; + static final int ENOSYS = 0x26; + + // offsets (arch dependent) that our BPF checks + static final int SECCOMP_DATA_NR_OFFSET = 0x00; + static final int SECCOMP_DATA_ARCH_OFFSET = 0x04; + + // currently this range is blocked (inclusive): + // execve is really the only one needed but why let someone fork a 30G heap? (not really what happens) + // ... + // 57: fork + // 58: vfork + // 59: execve + // ... + static final int BLACKLIST_START = 57; + static final int BLACKLIST_END = 59; + + // TODO: execveat()? its less of a risk since the jvm does not use it... + + /** try to install our filters */ + static void installFilter() { + // first be defensive: we can give nice errors this way, at the very least. + // also, some of these security features get backported to old versions, checking kernel version here is a big no-no! 
+ boolean supported = Constants.LINUX && "amd64".equals(Constants.OS_ARCH); + if (supported == false) { + throw new IllegalStateException("bug: should not be trying to initialize seccomp for an unsupported architecture"); + } + + // we couldn't link methods, could be some really ancient kernel (e.g. < 2.1.57) or some bug + if (libc == null) { + throw new UnsupportedOperationException("seccomp unavailable: could not link methods. requires kernel 3.5+ with CONFIG_SECCOMP and CONFIG_SECCOMP_FILTER compiled in"); + } + + // check for kernel version + if (libc.prctl(PR_GET_NO_NEW_PRIVS, 0, 0, 0, 0) < 0) { + int errno = Native.getLastError(); + switch (errno) { + case ENOSYS: throw new UnsupportedOperationException("seccomp unavailable: requires kernel 3.5+ with CONFIG_SECCOMP and CONFIG_SECCOMP_FILTER compiled in"); + default: throw new UnsupportedOperationException("prctl(PR_GET_NO_NEW_PRIVS): " + JNACLibrary.strerror(errno)); + } + } + // check for SECCOMP + if (libc.prctl(PR_GET_SECCOMP, 0, 0, 0, 0) < 0) { + int errno = Native.getLastError(); + switch (errno) { + case EINVAL: throw new UnsupportedOperationException("seccomp unavailable: CONFIG_SECCOMP not compiled into kernel, CONFIG_SECCOMP and CONFIG_SECCOMP_FILTER are needed"); + default: throw new UnsupportedOperationException("prctl(PR_GET_SECCOMP): " + JNACLibrary.strerror(errno)); + } + } + // check for SECCOMP_MODE_FILTER + if (libc.prctl(PR_SET_SECCOMP, SECCOMP_MODE_FILTER, 0, 0, 0) < 0) { + int errno = Native.getLastError(); + switch (errno) { + case EFAULT: break; // available + case EINVAL: throw new UnsupportedOperationException("seccomp unavailable: CONFIG_SECCOMP_FILTER not compiled into kernel, CONFIG_SECCOMP and CONFIG_SECCOMP_FILTER are needed"); + default: throw new UnsupportedOperationException("prctl(PR_SET_SECCOMP): " + JNACLibrary.strerror(errno)); + } + } + + // ok, now set PR_SET_NO_NEW_PRIVS, needed to be able to set a seccomp filter as ordinary user + if (libc.prctl(PR_SET_NO_NEW_PRIVS, 1, 
0, 0, 0) < 0) { + throw new UnsupportedOperationException("prctl(PR_SET_NO_NEW_PRIVS): " + JNACLibrary.strerror(Native.getLastError())); + } + + // BPF installed to check arch, then syscall range. See https://www.kernel.org/doc/Documentation/prctl/seccomp_filter.txt for details. + SockFilter insns[] = { + /* 1 */ BPF_STMT(BPF_LD + BPF_W + BPF_ABS, SECCOMP_DATA_ARCH_OFFSET), // if (arch != amd64) goto fail; + /* 2 */ BPF_JUMP(BPF_JMP + BPF_JEQ + BPF_K, AUDIT_ARCH_X86_64, 0, 3), // + /* 3 */ BPF_STMT(BPF_LD + BPF_W + BPF_ABS, SECCOMP_DATA_NR_OFFSET), // if (syscall < BLACKLIST_START) goto pass; + /* 4 */ BPF_JUMP(BPF_JMP + BPF_JGE + BPF_K, BLACKLIST_START, 0, 2), // + /* 5 */ BPF_JUMP(BPF_JMP + BPF_JGT + BPF_K, BLACKLIST_END, 1, 0), // if (syscall > BLACKLIST_END) goto pass; + /* 6 */ BPF_STMT(BPF_RET + BPF_K, SECCOMP_RET_ERRNO | (EACCES & SECCOMP_RET_DATA)), // fail: return EACCES; + /* 7 */ BPF_STMT(BPF_RET + BPF_K, SECCOMP_RET_ALLOW) // pass: return OK; + }; + + // seccomp takes a long, so we pass it one explicitly to keep the JNA simple + SockFProg prog = new SockFProg(insns); + prog.write(); + long pointer = Pointer.nativeValue(prog.getPointer()); + + // install filter, if this works, after this there is no going back! 
+ // first try it with seccomp(SECCOMP_SET_MODE_FILTER), falling back to prctl() + if (libc.syscall(SECCOMP_SYSCALL_NR, SECCOMP_SET_MODE_FILTER, SECCOMP_FILTER_FLAG_TSYNC, pointer) != 0) { + int errno1 = Native.getLastError(); + if (logger.isDebugEnabled()) { + logger.debug("seccomp(SECCOMP_SET_MODE_FILTER): " + JNACLibrary.strerror(errno1) + ", falling back to prctl(PR_SET_SECCOMP)..."); + } + if (libc.prctl(PR_SET_SECCOMP, SECCOMP_MODE_FILTER, pointer, 0, 0) < 0) { + int errno2 = Native.getLastError(); + throw new UnsupportedOperationException("seccomp(SECCOMP_SET_MODE_FILTER): " + JNACLibrary.strerror(errno1) + + ", prctl(PR_SET_SECCOMP): " + JNACLibrary.strerror(errno2)); + } + } + + // now check that the filter was really installed, we should be in filter mode. + if (libc.prctl(PR_GET_SECCOMP, 0, 0, 0, 0) != 2) { + throw new UnsupportedOperationException("seccomp filter installation did not really succeed. seccomp(PR_GET_SECCOMP): " + JNACLibrary.strerror(Native.getLastError())); + } + } +} diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/AliasValidator.java b/core/src/main/java/org/elasticsearch/cluster/metadata/AliasValidator.java index f12824de4ee..9d1a39d1dac 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/AliasValidator.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/AliasValidator.java @@ -28,7 +28,7 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.Index; import org.elasticsearch.index.query.IndexQueryParserService; -import org.elasticsearch.index.query.QueryParseContext; +import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.indices.InvalidAliasNameException; import java.io.IOException; @@ -142,10 +142,10 @@ public class AliasValidator extends AbstractComponent { } private void validateAliasFilter(XContentParser parser, IndexQueryParserService indexQueryParserService) throws 
IOException { - QueryParseContext context = indexQueryParserService.getParseContext(); + QueryShardContext context = indexQueryParserService.getShardContext(); try { context.reset(parser); - context.parseInnerFilter(); + context.parseContext().parseInnerQueryBuilder().toFilter(context); } finally { context.reset(null); parser.close(); diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java b/core/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java index 05c580ffc1b..adc94a5c74a 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java @@ -219,6 +219,15 @@ public class IndexNameExpressionResolver extends AbstractComponent { return state.metaData().getAliasAndIndexLookup().containsKey(resolvedAliasOrIndex); } + /** + * @return If the specified string is a date math expression then this method returns the resolved expression. + */ + public String resolveDateMathExpression(String dateExpression) { + // The date math expression resolver doesn't rely on cluster state or indices options, because + // it just resolves the date math to an actual date. + return dateMathExpressionResolver.resolveExpression(dateExpression, new Context(null, null)); + } + /** * Iterates through the list of indices and selects the effective list of filtering aliases for the * given index. diff --git a/core/src/main/java/org/elasticsearch/common/Numbers.java b/core/src/main/java/org/elasticsearch/common/Numbers.java index df57c55b983..52d0337ef73 100644 --- a/core/src/main/java/org/elasticsearch/common/Numbers.java +++ b/core/src/main/java/org/elasticsearch/common/Numbers.java @@ -171,4 +171,11 @@ public final class Numbers { return longToBytes(Double.doubleToRawLongBits(val)); } + /** Returns true if value is neither NaN nor infinite.
*/ + public static boolean isValidDouble(double value) { + if (Double.isNaN(value) || Double.isInfinite(value)) { + return false; + } + return true; + } } diff --git a/core/src/main/java/org/elasticsearch/common/ParsingException.java b/core/src/main/java/org/elasticsearch/common/ParsingException.java index 8ce2dd177da..0519ab38339 100644 --- a/core/src/main/java/org/elasticsearch/common/ParsingException.java +++ b/core/src/main/java/org/elasticsearch/common/ParsingException.java @@ -24,15 +24,14 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentLocation; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.Index; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.rest.RestStatus; import java.io.IOException; /** - * + * Exception that can be used when parsing queries with a given {@link QueryParseContext}. + * Can contain information about location of the error. */ public class ParsingException extends ElasticsearchException { @@ -40,25 +39,17 @@ public class ParsingException extends ElasticsearchException { private final int lineNumber; private final int columnNumber; - public ParsingException(QueryParseContext parseContext, String msg, Object... args) { - this(parseContext, msg, null, args); + public ParsingException(XContentLocation contentLocation, String msg, Object... args) { + this(contentLocation, msg, null, args); } - public ParsingException(QueryParseContext parseContext, String msg, Throwable cause, Object... args) { - this(parseContext.index(), parseContext.parser(), msg, cause, args); - } - - public ParsingException(Index index, XContentParser parser, String msg, Throwable cause, Object... args) { + public ParsingException(XContentLocation contentLocation, String msg, Throwable cause, Object... 
args) { super(msg, cause, args); - setIndex(index); int lineNumber = UNKNOWN_POSITION; int columnNumber = UNKNOWN_POSITION; - if (parser != null) { - XContentLocation location = parser.getTokenLocation(); - if (location != null) { - lineNumber = location.lineNumber; - columnNumber = location.columnNumber; - } + if (contentLocation != null) { + lineNumber = contentLocation.lineNumber; + columnNumber = contentLocation.columnNumber; } this.columnNumber = columnNumber; this.lineNumber = lineNumber; @@ -68,16 +59,21 @@ public class ParsingException extends ElasticsearchException { * This constructor is provided for use in unit tests where a * {@link QueryParseContext} may not be available */ - public ParsingException(Index index, int line, int col, String msg, Throwable cause) { + public ParsingException(int line, int col, String msg, Throwable cause) { super(msg, cause); - setIndex(index); this.lineNumber = line; this.columnNumber = col; } + public ParsingException(StreamInput in) throws IOException{ + super(in); + lineNumber = in.readInt(); + columnNumber = in.readInt(); + } + /** * Line number of the location of the error - * + * * @return the line number or -1 if unknown */ public int getLineNumber() { @@ -86,7 +82,7 @@ public class ParsingException extends ElasticsearchException { /** * Column number of the location of the error - * + * * @return the column number or -1 if unknown */ public int getColumnNumber() { @@ -113,11 +109,4 @@ public class ParsingException extends ElasticsearchException { out.writeInt(lineNumber); out.writeInt(columnNumber); } - - public ParsingException(StreamInput in) throws IOException{ - super(in); - lineNumber = in.readInt(); - columnNumber = in.readInt(); - } - } diff --git a/core/src/main/java/org/elasticsearch/common/Strings.java b/core/src/main/java/org/elasticsearch/common/Strings.java index 641db0ef963..a324736716b 100644 --- a/core/src/main/java/org/elasticsearch/common/Strings.java +++ 
b/core/src/main/java/org/elasticsearch/common/Strings.java @@ -570,7 +570,6 @@ public class Strings { count++; } } - // TODO (MvG): No push: hppc or jcf? final Set result = new HashSet<>(count); final int len = chars.length; int start = 0; // starting index in chars of the current substring. diff --git a/core/src/main/java/org/elasticsearch/common/geo/GeoDistance.java b/core/src/main/java/org/elasticsearch/common/geo/GeoDistance.java index fca80970439..2a31596eab9 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/GeoDistance.java +++ b/core/src/main/java/org/elasticsearch/common/geo/GeoDistance.java @@ -21,6 +21,9 @@ package org.elasticsearch.common.geo; import org.apache.lucene.util.Bits; import org.apache.lucene.util.SloppyMath; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.unit.DistanceUnit; import org.elasticsearch.index.fielddata.FieldData; import org.elasticsearch.index.fielddata.GeoPointValues; @@ -28,17 +31,17 @@ import org.elasticsearch.index.fielddata.MultiGeoPointValues; import org.elasticsearch.index.fielddata.NumericDoubleValues; import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; import org.elasticsearch.index.fielddata.SortingNumericDoubleValues; - +import java.io.IOException; import java.util.Locale; /** * Geo distance calculation. */ -public enum GeoDistance { +public enum GeoDistance implements Writeable { /** * Calculates distance as points on a plane. Faster, but less accurate than {@link #ARC}. */ - PLANE() { + PLANE { @Override public double calculate(double sourceLatitude, double sourceLongitude, double targetLatitude, double targetLongitude, DistanceUnit unit) { double px = targetLongitude - sourceLongitude; @@ -60,7 +63,7 @@ public enum GeoDistance { /** * Calculates distance factor. 
*/ - FACTOR() { + FACTOR { @Override public double calculate(double sourceLatitude, double sourceLongitude, double targetLatitude, double targetLongitude, DistanceUnit unit) { double longitudeDifference = targetLongitude - sourceLongitude; @@ -82,7 +85,7 @@ public enum GeoDistance { /** * Calculates distance as points on a globe. */ - ARC() { + ARC { @Override public double calculate(double sourceLatitude, double sourceLongitude, double targetLatitude, double targetLongitude, DistanceUnit unit) { double x1 = sourceLatitude * Math.PI / 180D; @@ -109,7 +112,7 @@ public enum GeoDistance { * Calculates distance as points on a globe in a sloppy way. Close to the pole areas the accuracy * of this function decreases. */ - SLOPPY_ARC() { + SLOPPY_ARC { @Override public double normalize(double distance, DistanceUnit unit) { @@ -127,12 +130,31 @@ public enum GeoDistance { } }; + /** Returns a GeoDistance object as read from the StreamInput. */ + @Override + public GeoDistance readFrom(StreamInput in) throws IOException { + int ord = in.readVInt(); + if (ord < 0 || ord >= values().length) { + throw new IOException("Unknown GeoDistance ordinal [" + ord + "]"); + } + return GeoDistance.values()[ord]; + } + + public static GeoDistance readGeoDistanceFrom(StreamInput in) throws IOException { + return DEFAULT.readFrom(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeVInt(this.ordinal()); + } + /** * Default {@link GeoDistance} function. This method should be used, If no specific function has been selected. 
* This is an alias for SLOPPY_ARC */ - public static final GeoDistance DEFAULT = SLOPPY_ARC; - + public static final GeoDistance DEFAULT = SLOPPY_ARC; + public abstract double normalize(double distance, DistanceUnit unit); public abstract double calculate(double sourceLatitude, double sourceLongitude, double targetLatitude, double targetLongitude, DistanceUnit unit); @@ -180,14 +202,14 @@ public enum GeoDistance { /** * Get a {@link GeoDistance} according to a given name. Valid values are - * + * *

     * <ul>
     *     <li><code>plane</code> for <code>GeoDistance.PLANE</code></li>
     *     <li><code>sloppy_arc</code> for <code>GeoDistance.SLOPPY_ARC</code></li>
     *     <li><code>factor</code> for <code>GeoDistance.FACTOR</code></li>
     *     <li><code>arc</code> for <code>GeoDistance.ARC</code></li>
     * </ul>
- * + * * @param name name of the {@link GeoDistance} * @return a {@link GeoDistance} */ @@ -336,7 +358,7 @@ public enum GeoDistance { /** * Basic implementation of {@link FixedSourceDistance}. This class keeps the basic parameters for a distance - * functions based on a fixed source. Namely latitude, longitude and unit. + * functions based on a fixed source. Namely latitude, longitude and unit. */ public static abstract class FixedSourceDistanceBase implements FixedSourceDistance { protected final double sourceLatitude; @@ -349,7 +371,7 @@ public enum GeoDistance { this.unit = unit; } } - + public static class ArcFixedSourceDistance extends FixedSourceDistanceBase { public ArcFixedSourceDistance(double sourceLatitude, double sourceLongitude, DistanceUnit unit) { diff --git a/core/src/main/java/org/elasticsearch/common/geo/GeoPoint.java b/core/src/main/java/org/elasticsearch/common/geo/GeoPoint.java index 6b4ffd23a61..c50b85a835c 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/GeoPoint.java +++ b/core/src/main/java/org/elasticsearch/common/geo/GeoPoint.java @@ -19,6 +19,11 @@ package org.elasticsearch.common.geo; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; + +import java.io.IOException; import org.apache.lucene.util.BitUtil; import org.apache.lucene.util.XGeoHashUtils; @@ -27,11 +32,14 @@ import org.apache.lucene.util.XGeoUtils; /** * */ -public final class GeoPoint { +public final class GeoPoint implements Writeable { private double lat; private double lon; private final static double TOLERANCE = XGeoUtils.TOLERANCE; + + // for serialization purposes + private static final GeoPoint PROTOTYPE = new GeoPoint(Double.NaN, Double.NaN); public GeoPoint() { } @@ -51,6 +59,10 @@ public final class GeoPoint { this.lon = lon; } + public GeoPoint(GeoPoint template) { + this(template.getLat(), template.getLon()); + } + public GeoPoint 
reset(double lat, double lon) { this.lat = lat; this.lon = lon; @@ -152,8 +164,7 @@ public final class GeoPoint { } public static GeoPoint parseFromLatLon(String latLon) { - GeoPoint point = new GeoPoint(); - point.resetFromString(latLon); + GeoPoint point = new GeoPoint(latLon); return point; } @@ -168,4 +179,21 @@ public final class GeoPoint { public static GeoPoint fromIndexLong(long indexLong) { return new GeoPoint().resetFromIndexHash(indexLong); } -} \ No newline at end of file + + @Override + public GeoPoint readFrom(StreamInput in) throws IOException { + double lat = in.readDouble(); + double lon = in.readDouble(); + return new GeoPoint(lat, lon); + } + + public static GeoPoint readGeoPointFrom(StreamInput in) throws IOException { + return PROTOTYPE.readFrom(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeDouble(lat); + out.writeDouble(lon); + } +} diff --git a/core/src/main/java/org/elasticsearch/common/geo/GeoUtils.java b/core/src/main/java/org/elasticsearch/common/geo/GeoUtils.java index be06d4bd0e5..62fe81a5f15 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/GeoUtils.java +++ b/core/src/main/java/org/elasticsearch/common/geo/GeoUtils.java @@ -23,6 +23,7 @@ import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree; import org.apache.lucene.spatial.prefix.tree.QuadPrefixTree; import org.apache.lucene.util.SloppyMath; import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.common.Numbers; import org.elasticsearch.common.unit.DistanceUnit; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser.Token; @@ -34,10 +35,19 @@ import java.io.IOException; */ public class GeoUtils { + /** Maximum valid latitude in degrees. */ + public static final double MAX_LAT = 90.0; + /** Minimum valid latitude in degrees. */ + public static final double MIN_LAT = -90.0; + /** Maximum valid longitude in degrees. 
*/ + public static final double MAX_LON = 180.0; + /** Minimum valid longitude in degrees. */ + public static final double MIN_LON = -180.0; + public static final String LATITUDE = GeoPointFieldMapper.Names.LAT; public static final String LONGITUDE = GeoPointFieldMapper.Names.LON; public static final String GEOHASH = GeoPointFieldMapper.Names.GEOHASH; - + /** Earth ellipsoid major axis defined by WGS 84 in meters */ public static final double EARTH_SEMI_MAJOR_AXIS = 6378137.0; // meters (WGS 84) @@ -56,6 +66,22 @@ public class GeoUtils { /** Earth ellipsoid polar distance in meters */ public static final double EARTH_POLAR_DISTANCE = Math.PI * EARTH_SEMI_MINOR_AXIS; + /** Returns true if latitude is actually a valid latitude value.*/ + public static boolean isValidLatitude(double latitude) { + if (Double.isNaN(latitude) || Double.isInfinite(latitude) || latitude < GeoUtils.MIN_LAT || latitude > GeoUtils.MAX_LAT) { + return false; + } + return true; + } + + /** Returns true if longitude is actually a valid longitude value. */ + public static boolean isValidLongitude(double longitude) { + if (Double.isNaN(longitude) || Double.isInfinite(longitude) || longitude < GeoUtils.MIN_LON || longitude > GeoUtils.MAX_LON) { + return false; + } + return true; + } + + /** * Return an approximate value of the diameter of the earth (in meters) at the given latitude (in radians).
*/ diff --git a/core/src/main/java/org/elasticsearch/common/geo/ShapeRelation.java b/core/src/main/java/org/elasticsearch/common/geo/ShapeRelation.java index 6ee16938d1e..67287b6cb30 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/ShapeRelation.java +++ b/core/src/main/java/org/elasticsearch/common/geo/ShapeRelation.java @@ -19,13 +19,18 @@ package org.elasticsearch.common.geo; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; + +import java.io.IOException; import java.util.Locale; /** * Enum representing the relationship between a Query / Filter Shape and indexed Shapes * that will be used to determine if a Document should be matched or not */ -public enum ShapeRelation { +public enum ShapeRelation implements Writeable{ INTERSECTS("intersects"), DISJOINT("disjoint"), @@ -37,6 +42,20 @@ public enum ShapeRelation { this.relationName = relationName; } + @Override + public ShapeRelation readFrom(StreamInput in) throws IOException { + int ordinal = in.readVInt(); + if (ordinal < 0 || ordinal >= values().length) { + throw new IOException("Unknown ShapeRelation ordinal [" + ordinal + "]"); + } + return values()[ordinal]; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeVInt(ordinal()); + } + public static ShapeRelation getRelationByName(String name) { name = name.toLowerCase(Locale.ENGLISH); for (ShapeRelation relation : ShapeRelation.values()) { diff --git a/core/src/main/java/org/elasticsearch/common/geo/SpatialStrategy.java b/core/src/main/java/org/elasticsearch/common/geo/SpatialStrategy.java index a83f29156a1..23c1dbb43f1 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/SpatialStrategy.java +++ b/core/src/main/java/org/elasticsearch/common/geo/SpatialStrategy.java @@ -18,11 +18,16 @@ */ package org.elasticsearch.common.geo; +import org.elasticsearch.common.io.stream.StreamInput; +import 
org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; + +import java.io.IOException; /** * */ -public enum SpatialStrategy { +public enum SpatialStrategy implements Writeable { TERM("term"), RECURSIVE("recursive"); @@ -36,4 +41,27 @@ public enum SpatialStrategy { public String getStrategyName() { return strategyName; } + + @Override + public SpatialStrategy readFrom(StreamInput in) throws IOException { + int ordinal = in.readVInt(); + if (ordinal < 0 || ordinal >= values().length) { + throw new IOException("Unknown SpatialStrategy ordinal [" + ordinal + "]"); + } + return values()[ordinal]; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeVInt(ordinal()); + } + + public static SpatialStrategy fromString(String strategyName) { + for (SpatialStrategy strategy : values()) { + if (strategy.strategyName.equals(strategyName)) { + return strategy; + } + } + return null; + } } diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilder.java index 0c1a12de394..2380b975bd1 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilder.java @@ -27,6 +27,7 @@ import com.vividsolutions.jts.geom.Coordinate; import com.vividsolutions.jts.geom.Geometry; import com.vividsolutions.jts.geom.GeometryFactory; import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.action.support.ToXContentToBytes; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.unit.DistanceUnit.Distance; @@ -34,7 +35,6 @@ import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContent; import org.elasticsearch.common.xcontent.XContentBuilder; import 
org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.index.mapper.geo.GeoShapeFieldMapper; import java.io.IOException; @@ -43,7 +43,7 @@ import java.util.*; /** * Basic class for building GeoJSON shapes like Polygons, Linestrings, etc */ -public abstract class ShapeBuilder implements ToXContent { +public abstract class ShapeBuilder extends ToXContentToBytes { protected static final ESLogger LOGGER = ESLoggerFactory.getLogger(ShapeBuilder.class.getName()); @@ -209,16 +209,6 @@ public abstract class ShapeBuilder implements ToXContent { */ public static EnvelopeBuilder newEnvelope(Orientation orientation) { return new EnvelopeBuilder(orientation); } - @Override - public String toString() { - try { - XContentBuilder xcontent = JsonXContent.contentBuilder(); - return toXContent(xcontent, EMPTY_PARAMS).prettyPrint().string(); - } catch (IOException e) { - return super.toString(); - } - } - /** * Create a new Shape from this builder. Since calling this method could change the * defined shape. 
(by inserting new coordinates or change the position of points) diff --git a/core/src/main/java/org/elasticsearch/common/io/stream/FilterStreamInput.java b/core/src/main/java/org/elasticsearch/common/io/stream/FilterStreamInput.java index 0dac786778f..5f3bd011dd9 100644 --- a/core/src/main/java/org/elasticsearch/common/io/stream/FilterStreamInput.java +++ b/core/src/main/java/org/elasticsearch/common/io/stream/FilterStreamInput.java @@ -68,4 +68,4 @@ public abstract class FilterStreamInput extends StreamInput { public void setVersion(Version version) { delegate.setVersion(version); } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java b/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java index c78e10ce4e3..17d99951209 100644 --- a/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java +++ b/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java @@ -33,6 +33,8 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.text.StringAndBytesText; import org.elasticsearch.common.text.Text; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; @@ -55,8 +57,18 @@ import static org.elasticsearch.ElasticsearchException.readStackTrace; public abstract class StreamInput extends InputStream { + private final NamedWriteableRegistry namedWriteableRegistry; + private Version version = Version.CURRENT; + protected StreamInput() { + this.namedWriteableRegistry = new NamedWriteableRegistry(); + } + + protected StreamInput(NamedWriteableRegistry namedWriteableRegistry) { + this.namedWriteableRegistry = namedWriteableRegistry; + } + public Version getVersion() { return this.version; } @@ -349,6 +361,13 @@ public abstract class StreamInput extends 
InputStream { return ret; } + public String[] readOptionalStringArray() throws IOException { + if (readBoolean()) { + return readStringArray(); + } + return null; + } + @Nullable @SuppressWarnings("unchecked") public Map readMap() throws IOException { @@ -571,6 +590,20 @@ public abstract class StreamInput extends InputStream { throw new UnsupportedOperationException(); } + /** + * Reads a {@link QueryBuilder} from the current stream + */ + public QueryBuilder readQuery() throws IOException { + return readNamedWriteable(QueryBuilder.class); + } + + /** + * Reads a {@link org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder} from the current stream + */ + public ScoreFunctionBuilder readScoreFunction() throws IOException { + return readNamedWriteable(ScoreFunctionBuilder.class); + } + public static StreamInput wrap(BytesReference reference) { if (reference.hasArray() == false) { reference = reference.toBytesArray(); diff --git a/core/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java b/core/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java index 9621d049b6a..16128e40c64 100644 --- a/core/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java +++ b/core/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java @@ -31,6 +31,8 @@ import org.elasticsearch.Version; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.text.Text; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder; import org.joda.time.ReadableInstant; import java.io.EOFException; @@ -315,6 +317,18 @@ public abstract class StreamOutput extends OutputStream { } } + /** + * Writes a string array, for nullable string, writes false. 
+ */ + public void writeOptionalStringArray(@Nullable String[] array) throws IOException { + if (array == null) { + writeBoolean(false); + } else { + writeBoolean(true); + writeStringArray(array); + } + } + public void writeMap(@Nullable Map map) throws IOException { writeGenericValue(map); } @@ -568,4 +582,18 @@ public abstract class StreamOutput extends OutputStream { writeString(namedWriteable.getWriteableName()); namedWriteable.writeTo(this); } + + /** + * Writes a {@link QueryBuilder} to the current stream + */ + public void writeQuery(QueryBuilder queryBuilder) throws IOException { + writeNamedWriteable(queryBuilder); + } + + /** + * Writes a {@link ScoreFunctionBuilder} to the current stream + */ + public void writeScoreFunction(ScoreFunctionBuilder scoreFunctionBuilder) throws IOException { + writeNamedWriteable(scoreFunctionBuilder); + } } diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/MoreLikeThisQuery.java b/core/src/main/java/org/elasticsearch/common/lucene/search/MoreLikeThisQuery.java index cdfa9ad9991..410796497d5 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/search/MoreLikeThisQuery.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/search/MoreLikeThisQuery.java @@ -158,7 +158,7 @@ public class MoreLikeThisQuery extends Query { if (this.unlikeText != null || this.unlikeFields != null) { handleUnlike(mlt, this.unlikeText, this.unlikeFields); } - + return createQuery(mlt); } @@ -182,7 +182,7 @@ public class MoreLikeThisQuery extends Query { BooleanQuery bq = bqBuilder.build(); bq.setBoost(getBoost()); - return bq; + return bq; } private void handleUnlike(XMoreLikeThis mlt, String[] unlikeText, Fields[] unlikeFields) throws IOException { @@ -257,8 +257,8 @@ public class MoreLikeThisQuery extends Query { this.unlikeFields = unlikeFields; } - public void setUnlikeText(List unlikeText) { - this.unlikeText = unlikeText.toArray(Strings.EMPTY_ARRAY); + public void setUnlikeText(String[] unlikeText) { + 
this.unlikeText = unlikeText; } public String[] getMoreLikeFields() { diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/function/CombineFunction.java b/core/src/main/java/org/elasticsearch/common/lucene/search/function/CombineFunction.java index 41a5b859520..3876e6f3f9c 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/search/function/CombineFunction.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/search/function/CombineFunction.java @@ -20,19 +20,20 @@ package org.elasticsearch.common.lucene.search.function; import org.apache.lucene.search.Explanation; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; -public enum CombineFunction { - MULT { +import java.io.IOException; +import java.util.Locale; + +public enum CombineFunction implements Writeable { + MULTIPLY { @Override public float combine(double queryScore, double funcScore, double maxBoost) { return toFloat(queryScore * Math.min(funcScore, maxBoost)); } - @Override - public String getName() { - return "multiply"; - } - @Override public Explanation explain(Explanation queryExpl, Explanation funcExpl, float maxBoost) { Explanation boostExpl = Explanation.match(maxBoost, "maxBoost"); @@ -50,11 +51,6 @@ public enum CombineFunction { return toFloat(Math.min(funcScore, maxBoost)); } - @Override - public String getName() { - return "replace"; - } - @Override public Explanation explain(Explanation queryExpl, Explanation funcExpl, float maxBoost) { Explanation boostExpl = Explanation.match(maxBoost, "maxBoost"); @@ -71,11 +67,6 @@ public enum CombineFunction { return toFloat(queryScore + Math.min(funcScore, maxBoost)); } - @Override - public String getName() { - return "sum"; - } - @Override public Explanation explain(Explanation queryExpl, Explanation funcExpl, float maxBoost) { Explanation minExpl = Explanation.match(Math.min(funcExpl.getValue(), 
maxBoost), "min of:", @@ -91,11 +82,6 @@ public enum CombineFunction { return toFloat((Math.min(funcScore, maxBoost) + queryScore) / 2.0); } - @Override - public String getName() { - return "avg"; - } - @Override public Explanation explain(Explanation queryExpl, Explanation funcExpl, float maxBoost) { Explanation minExpl = Explanation.match(Math.min(funcExpl.getValue(), maxBoost), "min of:", @@ -112,11 +98,6 @@ public enum CombineFunction { return toFloat(Math.min(queryScore, Math.min(funcScore, maxBoost))); } - @Override - public String getName() { - return "min"; - } - @Override public Explanation explain(Explanation queryExpl, Explanation funcExpl, float maxBoost) { Explanation innerMinExpl = Explanation.match( @@ -134,11 +115,6 @@ public enum CombineFunction { return toFloat(Math.max(queryScore, Math.min(funcScore, maxBoost))); } - @Override - public String getName() { - return "max"; - } - @Override public Explanation explain(Explanation queryExpl, Explanation funcExpl, float maxBoost) { Explanation innerMinExpl = Explanation.match( @@ -153,8 +129,6 @@ public enum CombineFunction { public abstract float combine(double queryScore, double funcScore, double maxBoost); - public abstract String getName(); - public static float toFloat(double input) { assert deviation(input) <= 0.001 : "input " + input + " out of float scope for function score deviation: " + deviation(input); return (float) input; @@ -166,4 +140,26 @@ public enum CombineFunction { } public abstract Explanation explain(Explanation queryExpl, Explanation funcExpl, float maxBoost); + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeVInt(this.ordinal()); + } + + @Override + public CombineFunction readFrom(StreamInput in) throws IOException { + int ordinal = in.readVInt(); + if (ordinal < 0 || ordinal >= values().length) { + throw new IOException("Unknown CombineFunction ordinal [" + ordinal + "]"); + } + return values()[ordinal]; + } + + public static CombineFunction 
readCombineFunctionFrom(StreamInput in) throws IOException { + return CombineFunction.MULTIPLY.readFrom(in); + } + + public static CombineFunction fromString(String combineFunction) { + return valueOf(combineFunction.toUpperCase(Locale.ROOT)); + } } diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/function/FieldValueFactorFunction.java b/core/src/main/java/org/elasticsearch/common/lucene/search/function/FieldValueFactorFunction.java index cb2babb574f..89c5f5f4a9d 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/search/function/FieldValueFactorFunction.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/search/function/FieldValueFactorFunction.java @@ -22,11 +22,16 @@ package org.elasticsearch.common.lucene.search.function; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.Explanation; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.index.fielddata.FieldData; import org.elasticsearch.index.fielddata.IndexNumericFieldData; import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; +import java.io.IOException; import java.util.Locale; +import java.util.Objects; /** * A function_score function that multiplies the score with the value of a @@ -45,7 +50,7 @@ public class FieldValueFactorFunction extends ScoreFunction { public FieldValueFactorFunction(String field, float boostFactor, Modifier modifierType, Double missing, IndexNumericFieldData indexFieldData) { - super(CombineFunction.MULT); + super(CombineFunction.MULTIPLY); this.field = field; this.boostFactor = boostFactor; this.modifier = modifierType; @@ -103,11 +108,19 @@ public class FieldValueFactorFunction extends ScoreFunction { return false; } + @Override + protected boolean doEquals(ScoreFunction other) { + FieldValueFactorFunction 
fieldValueFactorFunction = (FieldValueFactorFunction) other; + return this.boostFactor == fieldValueFactorFunction.boostFactor && + Objects.equals(this.field, fieldValueFactorFunction.field) && + Objects.equals(this.modifier, fieldValueFactorFunction.modifier); + } + /** * The Type class encapsulates the modification types that can be applied * to the score/value product. */ - public enum Modifier { + public enum Modifier implements Writeable { NONE { @Override public double apply(double n) { @@ -171,9 +184,31 @@ public class FieldValueFactorFunction extends ScoreFunction { public abstract double apply(double n); + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeVInt(this.ordinal()); + } + + public static Modifier readModifierFrom(StreamInput in) throws IOException { + return Modifier.NONE.readFrom(in); + } + + @Override + public Modifier readFrom(StreamInput in) throws IOException { + int ordinal = in.readVInt(); + if (ordinal < 0 || ordinal >= values().length) { + throw new IOException("Unknown Modifier ordinal [" + ordinal + "]"); + } + return values()[ordinal]; + } + @Override public String toString() { return super.toString().toLowerCase(Locale.ROOT); } + + public static Modifier fromString(String modifier) { + return valueOf(modifier.toUpperCase(Locale.ROOT)); + } } } diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/function/FiltersFunctionScoreQuery.java b/core/src/main/java/org/elasticsearch/common/lucene/search/function/FiltersFunctionScoreQuery.java index ebe25b85d80..7e94792dc5d 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/search/function/FiltersFunctionScoreQuery.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/search/function/FiltersFunctionScoreQuery.java @@ -29,14 +29,13 @@ import org.apache.lucene.search.Scorer; import org.apache.lucene.search.Weight; import org.apache.lucene.util.Bits; import org.apache.lucene.util.ToStringUtils; +import 
org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.lucene.Lucene; import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.Locale; -import java.util.Set; +import java.util.*; /** * A query that allows for a pluggable boost function / filter. If it matches @@ -55,53 +54,63 @@ public class FiltersFunctionScoreQuery extends Query { @Override public boolean equals(Object o) { - if (this == o) + if (this == o) { return true; - if (o == null || getClass() != o.getClass()) + } + if (o == null || getClass() != o.getClass()) { return false; - + } FilterFunction that = (FilterFunction) o; - - if (filter != null ? !filter.equals(that.filter) : that.filter != null) - return false; - if (function != null ? !function.equals(that.function) : that.function != null) - return false; - - return true; + return Objects.equals(this.filter, that.filter) && Objects.equals(this.function, that.function); } @Override public int hashCode() { - int result = filter != null ? filter.hashCode() : 0; - result = 31 * result + (function != null ? 
function.hashCode() : 0); - return result; + return Objects.hash(super.hashCode(), filter, function); } } - public static enum ScoreMode { - First, Avg, Max, Sum, Min, Multiply + public enum ScoreMode implements Writeable { + FIRST, AVG, MAX, SUM, MIN, MULTIPLY; + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeVInt(this.ordinal()); + } + + @Override + public ScoreMode readFrom(StreamInput in) throws IOException { + int ordinal = in.readVInt(); + if (ordinal < 0 || ordinal >= values().length) { + throw new IOException("Unknown ScoreMode ordinal [" + ordinal + "]"); + } + return values()[ordinal]; + } + + public static ScoreMode readScoreModeFrom(StreamInput in) throws IOException { + return ScoreMode.MULTIPLY.readFrom(in); + } + + public static ScoreMode fromString(String scoreMode) { + return valueOf(scoreMode.toUpperCase(Locale.ROOT)); + } } Query subQuery; final FilterFunction[] filterFunctions; final ScoreMode scoreMode; final float maxBoost; - private Float minScore; + private final Float minScore; - protected CombineFunction combineFunction; + final protected CombineFunction combineFunction; - public FiltersFunctionScoreQuery(Query subQuery, ScoreMode scoreMode, FilterFunction[] filterFunctions, float maxBoost, Float minScore) { + public FiltersFunctionScoreQuery(Query subQuery, ScoreMode scoreMode, FilterFunction[] filterFunctions, float maxBoost, Float minScore, CombineFunction combineFunction) { this.subQuery = subQuery; this.scoreMode = scoreMode; this.filterFunctions = filterFunctions; this.maxBoost = maxBoost; - combineFunction = CombineFunction.MULT; - this.minScore = minScore; - } - - public FiltersFunctionScoreQuery setCombineFunction(CombineFunction combineFunction) { this.combineFunction = combineFunction; - return this; + this.minScore = minScore; } public Query getSubQuery() { @@ -227,35 +236,34 @@ public class FiltersFunctionScoreQuery extends Query { // filters double factor = 1.0; switch (scoreMode) { - case 
First: - + case FIRST: factor = filterExplanations.get(0).getValue(); break; - case Max: + case MAX: factor = Double.NEGATIVE_INFINITY; - for (int i = 0; i < filterExplanations.size(); i++) { - factor = Math.max(filterExplanations.get(i).getValue(), factor); + for (Explanation filterExplanation : filterExplanations) { + factor = Math.max(filterExplanation.getValue(), factor); } break; - case Min: + case MIN: factor = Double.POSITIVE_INFINITY; - for (int i = 0; i < filterExplanations.size(); i++) { - factor = Math.min(filterExplanations.get(i).getValue(), factor); + for (Explanation filterExplanation : filterExplanations) { + factor = Math.min(filterExplanation.getValue(), factor); } break; - case Multiply: - for (int i = 0; i < filterExplanations.size(); i++) { - factor *= filterExplanations.get(i).getValue(); + case MULTIPLY: + for (Explanation filterExplanation : filterExplanations) { + factor *= filterExplanation.getValue(); } break; - default: // Avg / Total + default: double totalFactor = 0.0f; - for (int i = 0; i < filterExplanations.size(); i++) { - totalFactor += filterExplanations.get(i).getValue(); + for (Explanation filterExplanation : filterExplanations) { + totalFactor += filterExplanation.getValue(); } if (weightSum != 0) { factor = totalFactor; - if (scoreMode == ScoreMode.Avg) { + if (scoreMode == ScoreMode.AVG) { factor /= weightSum; } } @@ -293,58 +301,64 @@ public class FiltersFunctionScoreQuery extends Query { // be costly to call score(), so we explicitly check if scores // are needed float subQueryScore = needsScores ? 
scorer.score() : 0f; - if (scoreMode == ScoreMode.First) { - for (int i = 0; i < filterFunctions.length; i++) { - if (docSets[i].get(docId)) { - factor = functions[i].score(docId, subQueryScore); - break; - } - } - } else if (scoreMode == ScoreMode.Max) { - double maxFactor = Double.NEGATIVE_INFINITY; - for (int i = 0; i < filterFunctions.length; i++) { - if (docSets[i].get(docId)) { - maxFactor = Math.max(functions[i].score(docId, subQueryScore), maxFactor); - } - } - if (maxFactor != Float.NEGATIVE_INFINITY) { - factor = maxFactor; - } - } else if (scoreMode == ScoreMode.Min) { - double minFactor = Double.POSITIVE_INFINITY; - for (int i = 0; i < filterFunctions.length; i++) { - if (docSets[i].get(docId)) { - minFactor = Math.min(functions[i].score(docId, subQueryScore), minFactor); - } - } - if (minFactor != Float.POSITIVE_INFINITY) { - factor = minFactor; - } - } else if (scoreMode == ScoreMode.Multiply) { - for (int i = 0; i < filterFunctions.length; i++) { - if (docSets[i].get(docId)) { - factor *= functions[i].score(docId, subQueryScore); - } - } - } else { // Avg / Total - double totalFactor = 0.0f; - float weightSum = 0; - for (int i = 0; i < filterFunctions.length; i++) { - if (docSets[i].get(docId)) { - totalFactor += functions[i].score(docId, subQueryScore); - if (filterFunctions[i].function instanceof WeightFactorFunction) { - weightSum+= ((WeightFactorFunction)filterFunctions[i].function).getWeight(); - } else { - weightSum++; + switch(scoreMode) { + case FIRST: + for (int i = 0; i < filterFunctions.length; i++) { + if (docSets[i].get(docId)) { + factor = functions[i].score(docId, subQueryScore); + break; } } - } - if (weightSum != 0) { - factor = totalFactor; - if (scoreMode == ScoreMode.Avg) { - factor /= weightSum; + break; + case MAX: + double maxFactor = Double.NEGATIVE_INFINITY; + for (int i = 0; i < filterFunctions.length; i++) { + if (docSets[i].get(docId)) { + maxFactor = Math.max(functions[i].score(docId, subQueryScore), maxFactor); + } } - } 
+ if (maxFactor != Float.NEGATIVE_INFINITY) { + factor = maxFactor; + } + break; + case MIN: + double minFactor = Double.POSITIVE_INFINITY; + for (int i = 0; i < filterFunctions.length; i++) { + if (docSets[i].get(docId)) { + minFactor = Math.min(functions[i].score(docId, subQueryScore), minFactor); + } + } + if (minFactor != Float.POSITIVE_INFINITY) { + factor = minFactor; + } + break; + case MULTIPLY: + for (int i = 0; i < filterFunctions.length; i++) { + if (docSets[i].get(docId)) { + factor *= functions[i].score(docId, subQueryScore); + } + } + break; + default: // Avg / Total + double totalFactor = 0.0f; + float weightSum = 0; + for (int i = 0; i < filterFunctions.length; i++) { + if (docSets[i].get(docId)) { + totalFactor += functions[i].score(docId, subQueryScore); + if (filterFunctions[i].function instanceof WeightFactorFunction) { + weightSum+= ((WeightFactorFunction)filterFunctions[i].function).getWeight(); + } else { + weightSum++; + } + } + } + if (weightSum != 0) { + factor = totalFactor; + if (scoreMode == ScoreMode.AVG) { + factor /= weightSum; + } + } + break; } return scoreCombiner.combine(subQueryScore, factor, maxBoost); } @@ -364,19 +378,20 @@ public class FiltersFunctionScoreQuery extends Query { @Override public boolean equals(Object o) { - if (o == null || getClass() != o.getClass()) - return false; - FiltersFunctionScoreQuery other = (FiltersFunctionScoreQuery) o; - if (this.getBoost() != other.getBoost()) - return false; - if (!this.subQuery.equals(other.subQuery)) { + if (this == o) { + return true; + } + if (super.equals(o) == false) { return false; } - return Arrays.equals(this.filterFunctions, other.filterFunctions); + FiltersFunctionScoreQuery other = (FiltersFunctionScoreQuery) o; + return Objects.equals(this.subQuery, other.subQuery) && this.maxBoost == other.maxBoost && + Objects.equals(this.combineFunction, other.combineFunction) && Objects.equals(this.minScore, other.minScore) && + Arrays.equals(this.filterFunctions, 
other.filterFunctions); } @Override public int hashCode() { - return subQuery.hashCode() + 31 * Arrays.hashCode(filterFunctions) ^ Float.floatToIntBits(getBoost()); + return Objects.hash(super.hashCode(), subQuery, maxBoost, combineFunction, minScore, filterFunctions); } } diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/function/FunctionScoreQuery.java b/core/src/main/java/org/elasticsearch/common/lucene/search/function/FunctionScoreQuery.java index 2a8829632d4..907d66957ac 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/search/function/FunctionScoreQuery.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/search/function/FunctionScoreQuery.java @@ -23,7 +23,6 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.Term; import org.apache.lucene.search.*; -import org.apache.lucene.util.Bits; import org.apache.lucene.util.ToStringUtils; import java.io.IOException; @@ -35,31 +34,27 @@ import java.util.Set; */ public class FunctionScoreQuery extends Query { + public static final float DEFAULT_MAX_BOOST = Float.MAX_VALUE; + Query subQuery; final ScoreFunction function; - float maxBoost = Float.MAX_VALUE; - CombineFunction combineFunction; - private Float minScore = null; + final float maxBoost; + final CombineFunction combineFunction; + private Float minScore; - public FunctionScoreQuery(Query subQuery, ScoreFunction function, Float minScore) { + public FunctionScoreQuery(Query subQuery, ScoreFunction function, Float minScore, CombineFunction combineFunction, float maxBoost) { this.subQuery = subQuery; this.function = function; - this.combineFunction = function == null? 
CombineFunction.MULT : function.getDefaultScoreCombiner(); + this.combineFunction = combineFunction; this.minScore = minScore; + this.maxBoost = maxBoost; } public FunctionScoreQuery(Query subQuery, ScoreFunction function) { this.subQuery = subQuery; this.function = function; this.combineFunction = function.getDefaultScoreCombiner(); - } - - public void setCombineFunction(CombineFunction combineFunction) { - this.combineFunction = combineFunction; - } - - public void setMaxBoost(float maxBoost) { - this.maxBoost = maxBoost; + this.maxBoost = DEFAULT_MAX_BOOST; } public float getMaxBoost() { @@ -193,15 +188,20 @@ public class FunctionScoreQuery extends Query { @Override public boolean equals(Object o) { - if (o == null || getClass() != o.getClass()) + if (this == o) { + return true; + } + if (super.equals(o) == false) { return false; + } FunctionScoreQuery other = (FunctionScoreQuery) o; - return this.getBoost() == other.getBoost() && this.subQuery.equals(other.subQuery) && (this.function != null ? 
this.function.equals(other.function) : other.function == null) - && this.maxBoost == other.maxBoost; + return Objects.equals(this.subQuery, other.subQuery) && Objects.equals(this.function, other.function) + && Objects.equals(this.combineFunction, other.combineFunction) + && Objects.equals(this.minScore, other.minScore) && this.maxBoost == other.maxBoost; } @Override public int hashCode() { - return subQuery.hashCode() + 31 * Objects.hashCode(function) ^ Float.floatToIntBits(getBoost()); + return Objects.hash(super.hashCode(), subQuery.hashCode(), function, combineFunction, minScore, maxBoost); } } diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/function/RandomScoreFunction.java b/core/src/main/java/org/elasticsearch/common/lucene/search/function/RandomScoreFunction.java index bc1962ad0b1..cb34cbc515c 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/search/function/RandomScoreFunction.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/search/function/RandomScoreFunction.java @@ -38,7 +38,7 @@ public class RandomScoreFunction extends ScoreFunction { * Default constructor. Only useful for constructing as a placeholder, but should not be used for actual scoring. 
*/ public RandomScoreFunction() { - super(CombineFunction.MULT); + super(CombineFunction.MULTIPLY); uidFieldData = null; } @@ -50,7 +50,7 @@ public class RandomScoreFunction extends ScoreFunction { * @param uidFieldData The field data for _uid to use for generating consistent random values for the same id */ public RandomScoreFunction(int seed, int salt, IndexFieldData uidFieldData) { - super(CombineFunction.MULT); + super(CombineFunction.MULTIPLY); this.originalSeed = seed; this.saltedSeed = seed ^ salt; this.uidFieldData = uidFieldData; @@ -85,4 +85,11 @@ public class RandomScoreFunction extends ScoreFunction { public boolean needsScores() { return false; } + + @Override + protected boolean doEquals(ScoreFunction other) { + RandomScoreFunction randomScoreFunction = (RandomScoreFunction) other; + return this.originalSeed == randomScoreFunction.originalSeed && + this.saltedSeed == randomScoreFunction.saltedSeed; + } } diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/function/ScoreFunction.java b/core/src/main/java/org/elasticsearch/common/lucene/search/function/ScoreFunction.java index 1f12336b2f7..f4551d4dce8 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/search/function/ScoreFunction.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/search/function/ScoreFunction.java @@ -22,6 +22,7 @@ package org.elasticsearch.common.lucene.search.function; import org.apache.lucene.index.LeafReaderContext; import java.io.IOException; +import java.util.Objects; /** * @@ -46,4 +47,23 @@ public abstract class ScoreFunction { * @return {@code true} if scores are needed. 
*/ public abstract boolean needsScores(); + + @Override + public final boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + + ScoreFunction other = (ScoreFunction) obj; + return Objects.equals(scoreCombiner, other.scoreCombiner) && + doEquals(other); + } + + /** + * Indicates whether some other {@link ScoreFunction} object of the same type is "equal to" this one. + */ + protected abstract boolean doEquals(ScoreFunction other); } diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreFunction.java b/core/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreFunction.java index 1ff3cddf9f1..a715c61a7dd 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreFunction.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreFunction.java @@ -29,6 +29,7 @@ import org.elasticsearch.script.ScriptException; import org.elasticsearch.script.SearchScript; import java.io.IOException; +import java.util.Objects; public class ScriptScoreFunction extends ScoreFunction { @@ -136,4 +137,9 @@ public class ScriptScoreFunction extends ScoreFunction { return "script" + sScript.toString(); } + @Override + protected boolean doEquals(ScoreFunction other) { + ScriptScoreFunction scriptScoreFunction = (ScriptScoreFunction) other; + return Objects.equals(this.sScript, scriptScoreFunction.sScript); + } } \ No newline at end of file diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/function/WeightFactorFunction.java b/core/src/main/java/org/elasticsearch/common/lucene/search/function/WeightFactorFunction.java index c585da42814..fa70480881a 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/search/function/WeightFactorFunction.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/search/function/WeightFactorFunction.java @@ -23,18 +23,19 @@ 
import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.Explanation; import java.io.IOException; +import java.util.Objects; /** * */ public class WeightFactorFunction extends ScoreFunction { - private static final ScoreFunction SCORE_ONE = new ScoreOne(CombineFunction.MULT); + private static final ScoreFunction SCORE_ONE = new ScoreOne(CombineFunction.MULTIPLY); private final ScoreFunction scoreFunction; private float weight = 1.0f; public WeightFactorFunction(float weight, ScoreFunction scoreFunction) { - super(CombineFunction.MULT); + super(CombineFunction.MULTIPLY); if (scoreFunction == null) { this.scoreFunction = SCORE_ONE; } else { @@ -44,7 +45,7 @@ public class WeightFactorFunction extends ScoreFunction { } public WeightFactorFunction(float weight) { - super(CombineFunction.MULT); + super(CombineFunction.MULTIPLY); this.scoreFunction = SCORE_ONE; this.weight = weight; } @@ -81,6 +82,17 @@ public class WeightFactorFunction extends ScoreFunction { return weight; } + public ScoreFunction getScoreFunction() { + return scoreFunction; + } + + @Override + protected boolean doEquals(ScoreFunction other) { + WeightFactorFunction weightFactorFunction = (WeightFactorFunction) other; + return this.weight == weightFactorFunction.weight && + Objects.equals(this.scoreFunction, weightFactorFunction.scoreFunction); + } + private static class ScoreOne extends ScoreFunction { protected ScoreOne(CombineFunction scoreCombiner) { @@ -106,5 +118,10 @@ public class WeightFactorFunction extends ScoreFunction { public boolean needsScores() { return false; } + + @Override + protected boolean doEquals(ScoreFunction other) { + return true; + } } } diff --git a/core/src/main/java/org/elasticsearch/common/path/PathTrie.java b/core/src/main/java/org/elasticsearch/common/path/PathTrie.java index 0cc1d09c997..3bf2a9b17ee 100644 --- a/core/src/main/java/org/elasticsearch/common/path/PathTrie.java +++ b/core/src/main/java/org/elasticsearch/common/path/PathTrie.java @@ 
-22,6 +22,8 @@ package org.elasticsearch.common.path; import com.google.common.collect.ImmutableMap; import org.elasticsearch.common.Strings; +import java.util.ArrayList; +import java.util.List; import java.util.Map; import static org.elasticsearch.common.collect.MapBuilder.newMapBuilder; @@ -195,7 +197,7 @@ public class PathTrie { private void put(Map params, TrieNode node, String value) { if (params != null && node.isNamedWildcard()) { - params.put(node.namedWildcard(), decoder.decode(value)); + params.put(node.namedWildcard(), value); } } } @@ -222,7 +224,7 @@ public class PathTrie { if (path.length() == 0) { return rootValue; } - String[] strings = Strings.splitStringToArray(path, separator); + String[] strings = splitPath(decoder.decode(path)); if (strings.length == 0) { return rootValue; } @@ -233,4 +235,50 @@ public class PathTrie { } return root.retrieve(strings, index, params); } + + /* + Splits up the url path up by '/' and is aware of + index name expressions that appear between '<' and '>'. 
+ */ + String[] splitPath(final String path) { + if (path == null || path.length() == 0) { + return Strings.EMPTY_ARRAY; + } + int count = 1; + boolean splitAllowed = true; + for (int i = 0; i < path.length(); i++) { + final char currentC = path.charAt(i); + if ('<' == currentC) { + splitAllowed = false; + } else if (currentC == '>') { + splitAllowed = true; + } else if (splitAllowed && currentC == separator) { + count++; + } + } + + final List result = new ArrayList<>(count); + final StringBuilder builder = new StringBuilder(); + + splitAllowed = true; + for (int i = 0; i < path.length(); i++) { + final char currentC = path.charAt(i); + if ('<' == currentC) { + splitAllowed = false; + } else if (currentC == '>') { + splitAllowed = true; + } else if (splitAllowed && currentC == separator) { + if (builder.length() > 0) { + result.add(builder.toString()); + builder.setLength(0); + } + continue; + } + builder.append(currentC); + } + if (builder.length() > 0) { + result.add(builder.toString()); + } + return result.toArray(new String[result.size()]); + } } diff --git a/core/src/main/java/org/elasticsearch/common/unit/Fuzziness.java b/core/src/main/java/org/elasticsearch/common/unit/Fuzziness.java index 3b4a0733ec5..18641b8e418 100644 --- a/core/src/main/java/org/elasticsearch/common/unit/Fuzziness.java +++ b/core/src/main/java/org/elasticsearch/common/unit/Fuzziness.java @@ -19,19 +19,24 @@ package org.elasticsearch.common.unit; import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilderString; import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; +import java.util.Locale; +import java.util.Objects; /** * A unit class that 
encapsulates all in-exact search * parsing and conversion from similarities to edit distances * etc. */ -public final class Fuzziness implements ToXContent { +public final class Fuzziness implements ToXContent, Writeable { public static final XContentBuilderString X_FIELD_NAME = new XContentBuilderString("fuzziness"); public static final Fuzziness ZERO = new Fuzziness(0); @@ -42,6 +47,10 @@ public final class Fuzziness implements ToXContent { private final String fuzziness; + /** the prototype constant is intended for deserialization when used with + * {@link org.elasticsearch.common.io.stream.StreamableReader#readFrom(StreamInput)} */ + static final Fuzziness PROTOTYPE = AUTO; + private Fuzziness(int fuzziness) { if (fuzziness != 0 && fuzziness != 1 && fuzziness != 2) { throw new IllegalArgumentException("Valid edit distances are [0, 1, 2] but was [" + fuzziness + "]"); @@ -50,7 +59,10 @@ public final class Fuzziness implements ToXContent { } private Fuzziness(String fuzziness) { - this.fuzziness = fuzziness; + if (fuzziness == null) { + throw new IllegalArgumentException("fuzziness can't be null!"); + } + this.fuzziness = fuzziness.toUpperCase(Locale.ROOT); } /** @@ -120,7 +132,7 @@ public final class Fuzziness implements ToXContent { } public int asDistance(String text) { - if (this == AUTO) { //AUTO + if (this.equals(AUTO)) { //AUTO final int len = termLen(text); if (len <= 2) { return 0; @@ -134,7 +146,7 @@ public final class Fuzziness implements ToXContent { } public TimeValue asTimeValue() { - if (this == AUTO) { + if (this.equals(AUTO)) { return TimeValue.timeValueMillis(1); } else { return TimeValue.parseTimeValue(fuzziness.toString(), null, "fuzziness"); @@ -142,7 +154,7 @@ public final class Fuzziness implements ToXContent { } public long asLong() { - if (this == AUTO) { + if (this.equals(AUTO)) { return 1; } try { @@ -153,7 +165,7 @@ public final class Fuzziness implements ToXContent { } public int asInt() { - if (this == AUTO) { + if 
(this.equals(AUTO)) { return 1; } try { @@ -164,7 +176,7 @@ public final class Fuzziness implements ToXContent { } public short asShort() { - if (this == AUTO) { + if (this.equals(AUTO)) { return 1; } try { @@ -175,7 +187,7 @@ public final class Fuzziness implements ToXContent { } public byte asByte() { - if (this == AUTO) { + if (this.equals(AUTO)) { return 1; } try { @@ -186,14 +198,14 @@ public final class Fuzziness implements ToXContent { } public double asDouble() { - if (this == AUTO) { + if (this.equals(AUTO)) { return 1d; } return Double.parseDouble(fuzziness.toString()); } public float asFloat() { - if (this == AUTO) { + if (this.equals(AUTO)) { return 1f; } return Float.parseFloat(fuzziness.toString()); @@ -206,4 +218,35 @@ public final class Fuzziness implements ToXContent { public String asString() { return fuzziness.toString(); } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + Fuzziness other = (Fuzziness) obj; + return Objects.equals(fuzziness, other.fuzziness); + } + + @Override + public int hashCode() { + return fuzziness.hashCode(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(fuzziness); + } + + @Override + public Fuzziness readFrom(StreamInput in) throws IOException { + return new Fuzziness(in.readString()); + } + + public static Fuzziness readFuzzinessFrom(StreamInput in) throws IOException { + return PROTOTYPE.readFrom(in); + } } diff --git a/core/src/main/java/org/elasticsearch/common/xcontent/ObjectParser.java b/core/src/main/java/org/elasticsearch/common/xcontent/ObjectParser.java index 059a706ce19..95def1161c4 100644 --- a/core/src/main/java/org/elasticsearch/common/xcontent/ObjectParser.java +++ b/core/src/main/java/org/elasticsearch/common/xcontent/ObjectParser.java @@ -21,7 +21,6 @@ package org.elasticsearch.common.xcontent; import org.elasticsearch.common.ParseField; 
import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.ParsingException; -import org.elasticsearch.index.Index; import java.io.IOException; import java.util.*; @@ -130,7 +129,7 @@ public final class ObjectParser implements BiFunction implements BiFunction { INTERNAL((byte) 0) { @Override public boolean isVersionConflictForWrites(long currentVersion, long expectedVersion) { @@ -219,6 +224,8 @@ public enum VersionType { private final byte value; + private static final VersionType PROTOTYPE = INTERNAL; + VersionType(byte value) { this.value = value; } @@ -304,4 +311,20 @@ public enum VersionType { } throw new IllegalArgumentException("No version type match [" + value + "]"); } + + @Override + public VersionType readFrom(StreamInput in) throws IOException { + int ordinal = in.readVInt(); + assert (ordinal == 0 || ordinal == 1 || ordinal == 2 || ordinal == 3); + return VersionType.values()[ordinal]; + } + + public static VersionType readVersionTypeFrom(StreamInput in) throws IOException { + return PROTOTYPE.readFrom(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeVInt(ordinal()); + } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java b/core/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java index 1d34e4ee3f0..2e1f9df2d0d 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java @@ -33,7 +33,7 @@ import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.fielddata.FieldDataType; -import org.elasticsearch.index.query.QueryParseContext; +import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.similarity.SimilarityProvider; import java.io.IOException; @@ -437,7 +437,7 @@ public abstract class 
MappedFieldType extends FieldType { } /** - * Should the field query {@link #termQuery(Object, org.elasticsearch.index.query.QueryParseContext)} be used when detecting this + * Should the field query {@link #termQuery(Object, org.elasticsearch.index.query.QueryShardContext)} be used when detecting this * field in query string. */ public boolean useTermQueryWithQueryString() { @@ -449,11 +449,11 @@ public abstract class MappedFieldType extends FieldType { return new Term(names().indexName(), indexedValueForSearch(value)); } - public Query termQuery(Object value, @Nullable QueryParseContext context) { + public Query termQuery(Object value, @Nullable QueryShardContext context) { return new TermQuery(createTerm(value)); } - public Query termsQuery(List values, @Nullable QueryParseContext context) { + public Query termsQuery(List values, @Nullable QueryShardContext context) { BytesRef[] bytesRefs = new BytesRef[values.size()]; for (int i = 0; i < bytesRefs.length; i++) { bytesRefs[i] = indexedValueForSearch(values.get(i)); @@ -472,7 +472,7 @@ public abstract class MappedFieldType extends FieldType { return new FuzzyQuery(createTerm(value), fuzziness.asDistance(BytesRefs.toString(value)), prefixLength, maxExpansions, transpositions); } - public Query prefixQuery(String value, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryParseContext context) { + public Query prefixQuery(String value, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryShardContext context) { PrefixQuery query = new PrefixQuery(createTerm(value)); if (method != null) { query.setRewriteMethod(method); @@ -480,7 +480,7 @@ public abstract class MappedFieldType extends FieldType { return query; } - public Query regexpQuery(String value, int flags, int maxDeterminizedStates, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryParseContext context) { + public Query regexpQuery(String value, int flags, int maxDeterminizedStates, @Nullable MultiTermQuery.RewriteMethod method, 
@Nullable QueryShardContext context) { RegexpQuery query = new RegexpQuery(createTerm(value), flags, maxDeterminizedStates); if (method != null) { query.setRewriteMethod(method); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java index 770df633b9d..6d385875b18 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java @@ -125,6 +125,7 @@ public class GeoShapeFieldMapper extends FieldMapper { super(name, Defaults.FIELD_TYPE); } + @Override public GeoShapeFieldType fieldType() { return (GeoShapeFieldType)fieldType; } @@ -400,6 +401,10 @@ public class GeoShapeFieldMapper extends FieldMapper { return this.defaultStrategy; } + public PrefixTreeStrategy resolveStrategy(SpatialStrategy strategy) { + return resolveStrategy(strategy.getStrategyName()); + } + public PrefixTreeStrategy resolveStrategy(String strategyName) { if (SpatialStrategy.RECURSIVE.getStrategyName().equals(strategyName)) { recursiveStrategy.setPointsOnly(pointsOnly()); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/AllFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/AllFieldMapper.java index f872207c686..e538a00da16 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/AllFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/AllFieldMapper.java @@ -40,7 +40,7 @@ import org.elasticsearch.index.mapper.MergeMappingException; import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.MetadataFieldMapper; import org.elasticsearch.index.mapper.ParseContext; -import org.elasticsearch.index.query.QueryParseContext; +import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.similarity.SimilarityLookupService; import 
java.io.IOException; @@ -186,7 +186,7 @@ public class AllFieldMapper extends MetadataFieldMapper { } @Override - public Query termQuery(Object value, QueryParseContext context) { + public Query termQuery(Object value, QueryShardContext context) { return queryStringTermQuery(createTerm(value)); } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/IdFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/IdFieldMapper.java index c21e07c4cfc..70948b154ed 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/IdFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/IdFieldMapper.java @@ -24,7 +24,12 @@ import org.apache.lucene.document.Field; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.Term; import org.apache.lucene.queries.TermsQuery; -import org.apache.lucene.search.*; +import org.apache.lucene.search.BooleanClause; +import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.MultiTermQuery; +import org.apache.lucene.search.PrefixQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.RegexpQuery; import org.apache.lucene.util.BytesRef; import org.elasticsearch.Version; import org.elasticsearch.common.Nullable; @@ -36,8 +41,15 @@ import org.elasticsearch.common.util.iterable.Iterables; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.fielddata.FieldDataType; -import org.elasticsearch.index.mapper.*; -import org.elasticsearch.index.query.QueryParseContext; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.Mapper; +import org.elasticsearch.index.mapper.MapperParsingException; +import org.elasticsearch.index.mapper.MergeMappingException; +import org.elasticsearch.index.mapper.MergeResult; +import org.elasticsearch.index.mapper.MetadataFieldMapper; +import 
org.elasticsearch.index.mapper.ParseContext; +import org.elasticsearch.index.mapper.Uid; +import org.elasticsearch.index.query.QueryShardContext; import java.io.IOException; import java.util.Collection; @@ -48,7 +60,7 @@ import java.util.Map; import static org.elasticsearch.index.mapper.core.TypeParsers.parseField; /** - * + * */ public class IdFieldMapper extends MetadataFieldMapper { @@ -155,7 +167,7 @@ public class IdFieldMapper extends MetadataFieldMapper { } @Override - public Query termQuery(Object value, @Nullable QueryParseContext context) { + public Query termQuery(Object value, @Nullable QueryShardContext context) { if (indexOptions() != IndexOptions.NONE || context == null) { return super.termQuery(value, context); } @@ -164,7 +176,7 @@ public class IdFieldMapper extends MetadataFieldMapper { } @Override - public Query termsQuery(List values, @Nullable QueryParseContext context) { + public Query termsQuery(List values, @Nullable QueryShardContext context) { if (indexOptions() != IndexOptions.NONE || context == null) { return super.termsQuery(values, context); } @@ -172,7 +184,7 @@ public class IdFieldMapper extends MetadataFieldMapper { } @Override - public Query prefixQuery(String value, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryParseContext context) { + public Query prefixQuery(String value, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryShardContext context) { if (indexOptions() != IndexOptions.NONE || context == null) { return super.prefixQuery(value, method, context); } @@ -189,7 +201,7 @@ public class IdFieldMapper extends MetadataFieldMapper { } @Override - public Query regexpQuery(String value, int flags, int maxDeterminizedStates, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryParseContext context) { + public Query regexpQuery(String value, int flags, int maxDeterminizedStates, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryShardContext context) { if (indexOptions() != 
IndexOptions.NONE || context == null) { return super.regexpQuery(value, flags, maxDeterminizedStates, method, context); } @@ -224,7 +236,7 @@ public class IdFieldMapper extends MetadataFieldMapper { super(NAME, fieldType, Defaults.FIELD_TYPE, indexSettings); this.path = path; } - + private static MappedFieldType idFieldType(Settings indexSettings, MappedFieldType existing) { if (existing != null) { return existing.clone(); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/IndexFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/IndexFieldMapper.java index 3f395a8e0d1..1b7168a2d1c 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/IndexFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/IndexFieldMapper.java @@ -38,7 +38,7 @@ import org.elasticsearch.index.mapper.MergeMappingException; import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.MetadataFieldMapper; import org.elasticsearch.index.mapper.ParseContext; -import org.elasticsearch.index.query.QueryParseContext; +import org.elasticsearch.index.query.QueryShardContext; import java.io.IOException; import java.util.Iterator; @@ -157,7 +157,7 @@ public class IndexFieldMapper extends MetadataFieldMapper { * indices */ @Override - public Query termQuery(Object value, @Nullable QueryParseContext context) { + public Query termQuery(Object value, @Nullable QueryShardContext context) { if (context == null) { return super.termQuery(value, context); } @@ -171,7 +171,7 @@ public class IndexFieldMapper extends MetadataFieldMapper { @Override - public Query termsQuery(List values, QueryParseContext context) { + public Query termsQuery(List values, QueryShardContext context) { if (context == null) { return super.termsQuery(values, context); } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/ParentFieldMapper.java 
b/core/src/main/java/org/elasticsearch/index/mapper/internal/ParentFieldMapper.java index 70c1de69696..ca792b8705b 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/ParentFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/ParentFieldMapper.java @@ -34,8 +34,16 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.loader.SettingsLoader; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.fielddata.FieldDataType; -import org.elasticsearch.index.mapper.*; -import org.elasticsearch.index.query.QueryParseContext; +import org.elasticsearch.index.mapper.DocumentMapper; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.Mapper; +import org.elasticsearch.index.mapper.MapperParsingException; +import org.elasticsearch.index.mapper.MergeMappingException; +import org.elasticsearch.index.mapper.MergeResult; +import org.elasticsearch.index.mapper.MetadataFieldMapper; +import org.elasticsearch.index.mapper.ParseContext; +import org.elasticsearch.index.mapper.Uid; +import org.elasticsearch.index.query.QueryShardContext; import java.io.IOException; import java.util.ArrayList; @@ -202,12 +210,12 @@ public class ParentFieldMapper extends MetadataFieldMapper { } @Override - public Query termQuery(Object value, @Nullable QueryParseContext context) { + public Query termQuery(Object value, @Nullable QueryShardContext context) { return termsQuery(Collections.singletonList(value), context); } @Override - public Query termsQuery(List values, @Nullable QueryParseContext context) { + public Query termsQuery(List values, @Nullable QueryShardContext context) { if (context == null) { return super.termsQuery(values, context); } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/TypeFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/TypeFieldMapper.java index 
480d2a41818..12e40dec47a 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/TypeFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/TypeFieldMapper.java @@ -43,7 +43,7 @@ import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.MetadataFieldMapper; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.Uid; -import org.elasticsearch.index.query.QueryParseContext; +import org.elasticsearch.index.query.QueryShardContext; import java.io.IOException; import java.util.List; @@ -137,7 +137,7 @@ public class TypeFieldMapper extends MetadataFieldMapper { } @Override - public Query termQuery(Object value, @Nullable QueryParseContext context) { + public Query termQuery(Object value, @Nullable QueryShardContext context) { if (indexOptions() == IndexOptions.NONE) { return new ConstantScoreQuery(new PrefixQuery(new Term(UidFieldMapper.NAME, Uid.typePrefixAsBytes(BytesRefs.toBytesRef(value))))); } diff --git a/core/src/main/java/org/elasticsearch/index/percolator/PercolatorQueriesRegistry.java b/core/src/main/java/org/elasticsearch/index/percolator/PercolatorQueriesRegistry.java index 9740054c183..06d04f60d61 100644 --- a/core/src/main/java/org/elasticsearch/index/percolator/PercolatorQueriesRegistry.java +++ b/core/src/main/java/org/elasticsearch/index/percolator/PercolatorQueriesRegistry.java @@ -25,6 +25,7 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; import org.apache.lucene.util.BytesRef; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; @@ -42,8 +43,7 @@ import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.internal.TypeFieldMapper; import 
org.elasticsearch.index.percolator.stats.ShardPercolateService; import org.elasticsearch.index.query.IndexQueryParserService; -import org.elasticsearch.index.query.QueryParseContext; -import org.elasticsearch.common.ParsingException; +import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.settings.IndexSettings; import org.elasticsearch.index.shard.AbstractIndexShardComponent; import org.elasticsearch.index.shard.IndexShard; @@ -187,9 +187,9 @@ public class PercolatorQueriesRegistry extends AbstractIndexShardComponent imple private Query parseQuery(String type, XContentParser parser) { String[] previousTypes = null; if (type != null) { - QueryParseContext.setTypesWithPrevious(new String[]{type}); + QueryShardContext.setTypesWithPrevious(new String[]{type}); } - QueryParseContext context = queryParserService.getParseContext(); + QueryShardContext context = queryParserService.getShardContext(); try { context.reset(parser); // This means that fields in the query need to exist in the mapping prior to registering this query @@ -205,13 +205,13 @@ public class PercolatorQueriesRegistry extends AbstractIndexShardComponent imple // if index.percolator.map_unmapped_fields_as_string is set to true, query can contain unmapped fields which will be mapped // as an analyzed string. context.setAllowUnmappedFields(false); - context.setMapUnmappedFieldAsString(mapUnmappedFieldsAsString ? 
true : false); + context.setMapUnmappedFieldAsString(mapUnmappedFieldsAsString); return queryParserService.parseInnerQuery(context); } catch (IOException e) { - throw new ParsingException(context, "Failed to parse", e); + throw new ParsingException(parser.getTokenLocation(), "Failed to parse", e); } finally { if (type != null) { - QueryParseContext.setTypes(previousTypes); + QueryShardContext.setTypes(previousTypes); } context.reset(null); } diff --git a/core/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java new file mode 100644 index 00000000000..560476a69d8 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java @@ -0,0 +1,266 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.query; + +import org.apache.lucene.search.Query; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.action.support.ToXContentToBytes; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.lucene.BytesRefs; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentType; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.Objects; + +/** + * Base class for all classes producing lucene queries. + * Supports conversion to BytesReference and creation of lucene Query objects. + */ +public abstract class AbstractQueryBuilder extends ToXContentToBytes implements QueryBuilder { + + /** Default for boost to apply to resulting Lucene query. Defaults to 1.0*/ + public static final float DEFAULT_BOOST = 1.0f; + public static final ParseField NAME_FIELD = new ParseField("_name"); + public static final ParseField BOOST_FIELD = new ParseField("boost"); + + protected String queryName; + protected float boost = DEFAULT_BOOST; + + protected AbstractQueryBuilder() { + super(XContentType.JSON); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + doXContent(builder, params); + builder.endObject(); + return builder; + } + + protected abstract void doXContent(XContentBuilder builder, Params params) throws IOException; + + protected void printBoostAndQueryName(XContentBuilder builder) throws IOException { + builder.field("boost", boost); + if (queryName != null) { + builder.field("_name", queryName); + } + } + + @Override + public final Query toQuery(QueryShardContext context) throws IOException { + Query query = doToQuery(context); + if (query != null) { + setFinalBoost(query); + if 
(queryName != null) { + context.addNamedQuery(queryName, query); + } + } + return query; + } + + /** + * Sets the main boost to the query obtained by converting the current query into a lucene query. + * The default behaviour is to set the main boost, after verifying that we are not overriding any non default boost + * value that was previously set to the lucene query. That case would require some manual decision on how to combine + * the main boost with the boost coming from lucene by overriding this method. + * @throws IllegalStateException if the lucene query boost has already been set + */ + protected void setFinalBoost(Query query) { + if (query.getBoost() != AbstractQueryBuilder.DEFAULT_BOOST) { + throw new IllegalStateException("lucene query boost is already set, override setFinalBoost to define how to combine lucene boost with main boost"); + } + query.setBoost(boost); + } + + @Override + public final Query toFilter(QueryShardContext context) throws IOException { + Query result = null; + final boolean originalIsFilter = context.isFilter; + try { + context.isFilter = true; + result = toQuery(context); + } finally { + context.isFilter = originalIsFilter; + } + return result; + } + + protected abstract Query doToQuery(QueryShardContext context) throws IOException; + + /** + * Returns the query name for the query. + */ + @SuppressWarnings("unchecked") + @Override + public final QB queryName(String queryName) { + this.queryName = queryName; + return (QB) this; + } + + /** + * Sets the query name for the query. + */ + @Override + public final String queryName() { + return queryName; + } + + /** + * Returns the boost for this query. + */ + @Override + public final float boost() { + return this.boost; + } + + /** + * Sets the boost for this query. Documents matching this query will (in addition to the normal + * weightings) have their score multiplied by the boost provided. 
+ */ + @SuppressWarnings("unchecked") + @Override + public final QB boost(float boost) { + this.boost = boost; + return (QB) this; + } + + @Override + public final QB readFrom(StreamInput in) throws IOException { + QB queryBuilder = doReadFrom(in); + queryBuilder.boost = in.readFloat(); + queryBuilder.queryName = in.readOptionalString(); + return queryBuilder; + } + + protected abstract QB doReadFrom(StreamInput in) throws IOException; + + @Override + public final void writeTo(StreamOutput out) throws IOException { + doWriteTo(out); + out.writeFloat(boost); + out.writeOptionalString(queryName); + } + + protected abstract void doWriteTo(StreamOutput out) throws IOException; + + protected final QueryValidationException addValidationError(String validationError, QueryValidationException validationException) { + return QueryValidationException.addValidationError(getName(), validationError, validationException); + } + + @Override + public final boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + @SuppressWarnings("unchecked") + QB other = (QB) obj; + return Objects.equals(queryName, other.queryName) && + Objects.equals(boost, other.boost) && + doEquals(other); + } + + /** + * Indicates whether some other {@link QueryBuilder} object of the same type is "equal to" this one. + */ + protected abstract boolean doEquals(QB other); + + @Override + public final int hashCode() { + return Objects.hash(getClass(), queryName, boost, doHashCode()); + } + + protected abstract int doHashCode(); + + /** + * This helper method checks if the object passed in is a string, if so it + * converts it to a {@link BytesRef}. 
+ * @param obj the input object + * @return the same input object or a {@link BytesRef} representation if input was of type string + */ + protected static Object convertToBytesRefIfString(Object obj) { + if (obj instanceof String) { + return BytesRefs.toBytesRef(obj); + } + return obj; + } + + /** + * This helper method checks if the object passed in is a {@link BytesRef}, if so it + * converts it to a utf8 string. + * @param obj the input object + * @return the same input object or a utf8 string if input was of type {@link BytesRef} + */ + protected static Object convertToStringIfBytesRef(Object obj) { + if (obj instanceof BytesRef) { + return ((BytesRef) obj).utf8ToString(); + } + return obj; + } + + /** + * Helper method to convert collection of {@link QueryBuilder} instances to lucene + * {@link Query} instances. {@link QueryBuilder} that return null calling + * their {@link QueryBuilder#toQuery(QueryShardContext)} method are not added to the + * resulting collection. + */ + protected static Collection toQueries(Collection queryBuilders, QueryShardContext context) throws QueryShardException, + IOException { + List queries = new ArrayList<>(queryBuilders.size()); + for (QueryBuilder queryBuilder : queryBuilders) { + Query query = queryBuilder.toQuery(context); + if (query != null) { + queries.add(query); + } + } + return queries; + } + + @Override + public String getName() { + //default impl returns the same as writeable name, but we keep the distinction between the two just to make sure + return getWriteableName(); + } + + protected final void writeQueries(StreamOutput out, List queries) throws IOException { + out.writeVInt(queries.size()); + for (QueryBuilder query : queries) { + out.writeQuery(query); + } + } + + protected final List readQueries(StreamInput in) throws IOException { + List queries = new ArrayList<>(); + int size = in.readVInt(); + for (int i = 0; i < size; i++) { + queries.add(in.readQuery()); + } + return queries; + } +} diff --git 
a/core/src/main/java/org/elasticsearch/index/query/BaseTermQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/BaseTermQueryBuilder.java new file mode 100644 index 00000000000..06666b74120 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/index/query/BaseTermQueryBuilder.java @@ -0,0 +1,165 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.query; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Objects; + +public abstract class BaseTermQueryBuilder> extends AbstractQueryBuilder { + + /** Name of field to match against. */ + protected final String fieldName; + + /** Value to find matches for. */ + protected final Object value; + + /** + * Constructs a new base term query. + * + * @param fieldName The name of the field + * @param value The value of the term + */ + public BaseTermQueryBuilder(String fieldName, String value) { + this(fieldName, (Object) value); + } + + /** + * Constructs a new base term query. 
+ * + * @param fieldName The name of the field + * @param value The value of the term + */ + public BaseTermQueryBuilder(String fieldName, int value) { + this(fieldName, (Object) value); + } + + /** + * Constructs a new base term query. + * + * @param fieldName The name of the field + * @param value The value of the term + */ + public BaseTermQueryBuilder(String fieldName, long value) { + this(fieldName, (Object) value); + } + + /** + * Constructs a new base term query. + * + * @param fieldName The name of the field + * @param value The value of the term + */ + public BaseTermQueryBuilder(String fieldName, float value) { + this(fieldName, (Object) value); + } + + /** + * Constructs a new base term query. + * + * @param fieldName The name of the field + * @param value The value of the term + */ + public BaseTermQueryBuilder(String fieldName, double value) { + this(fieldName, (Object) value); + } + + /** + * Constructs a new base term query. + * + * @param fieldName The name of the field + * @param value The value of the term + */ + public BaseTermQueryBuilder(String fieldName, boolean value) { + this(fieldName, (Object) value); + } + + /** + * Constructs a new base term query. + * In case value is assigned to a string, we internally convert it to a {@link BytesRef} + * because in {@link TermQueryParser} and {@link SpanTermQueryParser} string values are parsed to {@link BytesRef} + * and we want internal representation of query to be equal regardless of whether it was created from XContent or via Java API. 
+ * + * @param fieldName The name of the field + * @param value The value of the term + */ + public BaseTermQueryBuilder(String fieldName, Object value) { + if (Strings.isEmpty(fieldName)) { + throw new IllegalArgumentException("field name is null or empty"); + } + if (value == null) { + throw new IllegalArgumentException("value cannot be null"); + } + this.fieldName = fieldName; + this.value = convertToBytesRefIfString(value); + } + + /** Returns the field name used in this query. */ + public String fieldName() { + return this.fieldName; + } + + /** + * Returns the value used in this query. + * If necessary, converts internal {@link BytesRef} representation back to string. + */ + public Object value() { + return convertToStringIfBytesRef(this.value); + } + + @Override + protected void doXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(getName()); + builder.startObject(fieldName); + builder.field("value", convertToStringIfBytesRef(this.value)); + printBoostAndQueryName(builder); + builder.endObject(); + builder.endObject(); + } + + @Override + protected final int doHashCode() { + return Objects.hash(fieldName, value); + } + + @Override + protected final boolean doEquals(BaseTermQueryBuilder other) { + return Objects.equals(fieldName, other.fieldName) && + Objects.equals(value, other.value); + } + + @Override + protected final QB doReadFrom(StreamInput in) throws IOException { + return createBuilder(in.readString(), in.readGenericValue()); + } + + protected abstract QB createBuilder(String fieldName, Object value); + + @Override + protected void doWriteTo(StreamOutput out) throws IOException { + out.writeString(fieldName); + out.writeGenericValue(value); + } +} diff --git a/core/src/main/java/org/elasticsearch/index/query/BoolQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/BoolQueryBuilder.java index d3fa9299450..25821e1dd1d 100644 --- 
a/core/src/main/java/org/elasticsearch/index/query/BoolQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/BoolQueryBuilder.java @@ -19,17 +19,35 @@ package org.elasticsearch.index.query; +import org.apache.lucene.search.BooleanClause; +import org.apache.lucene.search.BooleanClause.Occur; +import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.MatchAllDocsQuery; +import org.apache.lucene.search.Query; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.xcontent.XContentBuilder; import java.io.IOException; import java.util.ArrayList; import java.util.List; +import java.util.Objects; + +import static org.elasticsearch.common.lucene.search.Queries.fixNegativeQueryIfNeeded; /** * A Query that matches documents matching boolean combinations of other queries. */ -public class BoolQueryBuilder extends QueryBuilder implements BoostableQueryBuilder { +public class BoolQueryBuilder extends AbstractQueryBuilder { + + public static final String NAME = "bool"; + + public static final boolean ADJUST_PURE_NEGATIVE_DEFAULT = true; + + public static final boolean DISABLE_COORD_DEFAULT = false; + + static final BoolQueryBuilder PROTOTYPE = new BoolQueryBuilder(); private final List mustClauses = new ArrayList<>(); @@ -39,63 +57,92 @@ public class BoolQueryBuilder extends QueryBuilder implements BoostableQueryBuil private final List shouldClauses = new ArrayList<>(); - private float boost = -1; + private boolean disableCoord = DISABLE_COORD_DEFAULT; - private Boolean disableCoord; + private boolean adjustPureNegative = ADJUST_PURE_NEGATIVE_DEFAULT; private String minimumShouldMatch; - - private Boolean adjustPureNegative; - - private String queryName; /** * Adds a query that must appear in the matching documents and will - * contribute to scoring. + * contribute to scoring. 
No null value allowed. */ public BoolQueryBuilder must(QueryBuilder queryBuilder) { + if (queryBuilder == null) { + throw new IllegalArgumentException("inner bool query clause cannot be null"); + } mustClauses.add(queryBuilder); return this; } + /** + * Gets the queries that must appear in the matching documents. + */ + public List must() { + return this.mustClauses; + } + /** * Adds a query that must appear in the matching documents but will - * not contribute to scoring. + * not contribute to scoring. No null value allowed. */ public BoolQueryBuilder filter(QueryBuilder queryBuilder) { + if (queryBuilder == null) { + throw new IllegalArgumentException("inner bool query clause cannot be null"); + } filterClauses.add(queryBuilder); return this; } /** - * Adds a query that must not appear in the matching documents and - * will not contribute to scoring. + * Gets the queries that must appear in the matching documents but don't conntribute to scoring + */ + public List filter() { + return this.filterClauses; + } + + /** + * Adds a query that must not appear in the matching documents. + * No null value allowed. */ public BoolQueryBuilder mustNot(QueryBuilder queryBuilder) { + if (queryBuilder == null) { + throw new IllegalArgumentException("inner bool query clause cannot be null"); + } mustNotClauses.add(queryBuilder); return this; } /** - * Adds a query that should appear in the matching documents. For a boolean query with no + * Gets the queries that must not appear in the matching documents. + */ + public List mustNot() { + return this.mustNotClauses; + } + + /** + * Adds a clause that should be matched by the returned documents. For a boolean query with no * MUST clauses one or more SHOULD clauses must match a document - * for the BooleanQuery to match. + * for the BooleanQuery to match. No null value allowed. 
* * @see #minimumNumberShouldMatch(int) */ public BoolQueryBuilder should(QueryBuilder queryBuilder) { + if (queryBuilder == null) { + throw new IllegalArgumentException("inner bool query clause cannot be null"); + } shouldClauses.add(queryBuilder); return this; } /** - * Sets the boost for this query. Documents matching this query will (in addition to the normal - * weightings) have their score multiplied by the boost provided. + * Gets the list of clauses that should be matched by the returned documents. + * + * @see #should(QueryBuilder) + * @see #minimumNumberShouldMatch(int) */ - @Override - public BoolQueryBuilder boost(float boost) { - this.boost = boost; - return this; + public List should() { + return this.shouldClauses; } /** @@ -106,6 +153,13 @@ public class BoolQueryBuilder extends QueryBuilder implements BoostableQueryBuil return this; } + /** + * @return whether the Similarity#coord(int,int) in scoring are disabled. Defaults to false. + */ + public boolean disableCoord() { + return this.disableCoord; + } + /** * Specifies a minimum number of the optional (should) boolean clauses which must be satisfied. *

@@ -124,6 +178,23 @@ public class BoolQueryBuilder extends QueryBuilder implements BoostableQueryBuil return this; } + + /** + * Specifies a minimum number of the optional (should) boolean clauses which must be satisfied. + * @see BoolQueryBuilder#minimumNumberShouldMatch(int) + */ + public BoolQueryBuilder minimumNumberShouldMatch(String minimumNumberShouldMatch) { + this.minimumShouldMatch = minimumNumberShouldMatch; + return this; + } + + /** + * @return the string representation of the minimumShouldMatch settings for this query + */ + public String minimumShouldMatch() { + return this.minimumShouldMatch; + } + /** * Sets the minimum should match using the special syntax (for example, supporting percentage). */ @@ -139,7 +210,7 @@ public class BoolQueryBuilder extends QueryBuilder implements BoostableQueryBuil public boolean hasClauses() { return !(mustClauses.isEmpty() && shouldClauses.isEmpty() && mustNotClauses.isEmpty() && filterClauses.isEmpty()); } - + /** * If a boolean query contains only negative ("must not") clauses should the * BooleanQuery be enhanced with a {@link MatchAllDocsQuery} in order to act @@ -151,52 +222,126 @@ public class BoolQueryBuilder extends QueryBuilder implements BoostableQueryBuil } /** - * Sets the query name for the filter that can be used when searching for matched_filters per hit. 
+ * @return the setting for the adjust_pure_negative setting in this query */ - public BoolQueryBuilder queryName(String queryName) { - this.queryName = queryName; - return this; + public boolean adjustPureNegative() { + return this.adjustPureNegative; } @Override protected void doXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject("bool"); + builder.startObject(NAME); doXArrayContent("must", mustClauses, builder, params); doXArrayContent("filter", filterClauses, builder, params); doXArrayContent("must_not", mustNotClauses, builder, params); doXArrayContent("should", shouldClauses, builder, params); - if (boost != -1) { - builder.field("boost", boost); - } - if (disableCoord != null) { - builder.field("disable_coord", disableCoord); - } + builder.field("disable_coord", disableCoord); + builder.field("adjust_pure_negative", adjustPureNegative); if (minimumShouldMatch != null) { builder.field("minimum_should_match", minimumShouldMatch); } - if (adjustPureNegative != null) { - builder.field("adjust_pure_negative", adjustPureNegative); - } - if (queryName != null) { - builder.field("_name", queryName); - } + printBoostAndQueryName(builder); builder.endObject(); } - private void doXArrayContent(String field, List clauses, XContentBuilder builder, Params params) throws IOException { + private static void doXArrayContent(String field, List clauses, XContentBuilder builder, Params params) throws IOException { if (clauses.isEmpty()) { return; } - if (clauses.size() == 1) { - builder.field(field); - clauses.get(0).toXContent(builder, params); - } else { - builder.startArray(field); - for (QueryBuilder clause : clauses) { - clause.toXContent(builder, params); + builder.startArray(field); + for (QueryBuilder clause : clauses) { + clause.toXContent(builder, params); + } + builder.endArray(); + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + protected Query doToQuery(QueryShardContext context) throws 
IOException { + BooleanQuery.Builder booleanQueryBuilder = new BooleanQuery.Builder(); + booleanQueryBuilder.setDisableCoord(disableCoord); + addBooleanClauses(context, booleanQueryBuilder, mustClauses, BooleanClause.Occur.MUST); + addBooleanClauses(context, booleanQueryBuilder, mustNotClauses, BooleanClause.Occur.MUST_NOT); + addBooleanClauses(context, booleanQueryBuilder, shouldClauses, BooleanClause.Occur.SHOULD); + addBooleanClauses(context, booleanQueryBuilder, filterClauses, BooleanClause.Occur.FILTER); + BooleanQuery booleanQuery = booleanQueryBuilder.build(); + if (booleanQuery.clauses().isEmpty()) { + return new MatchAllDocsQuery(); + } + booleanQuery = Queries.applyMinimumShouldMatch(booleanQuery, minimumShouldMatch); + return adjustPureNegative ? fixNegativeQueryIfNeeded(booleanQuery) : booleanQuery; + } + + private void addBooleanClauses(QueryShardContext context, BooleanQuery.Builder booleanQueryBuilder, List clauses, Occur occurs) throws IOException { + for (QueryBuilder query : clauses) { + Query luceneQuery = null; + switch (occurs) { + case SHOULD: + if (context.isFilter() && minimumShouldMatch == null) { + minimumShouldMatch = "1"; + } + luceneQuery = query.toQuery(context); + break; + case FILTER: + case MUST_NOT: + luceneQuery = query.toFilter(context); + break; + case MUST: + luceneQuery = query.toQuery(context); + } + if (luceneQuery != null) { + booleanQueryBuilder.add(new BooleanClause(luceneQuery, occurs)); } - builder.endArray(); } } + @Override + protected int doHashCode() { + return Objects.hash(adjustPureNegative, disableCoord, + minimumShouldMatch, mustClauses, shouldClauses, mustNotClauses, filterClauses); + } + + @Override + protected boolean doEquals(BoolQueryBuilder other) { + return Objects.equals(adjustPureNegative, other.adjustPureNegative) && + Objects.equals(disableCoord, other.disableCoord) && + Objects.equals(minimumShouldMatch, other.minimumShouldMatch) && + Objects.equals(mustClauses, other.mustClauses) && + 
Objects.equals(shouldClauses, other.shouldClauses) && + Objects.equals(mustNotClauses, other.mustNotClauses) && + Objects.equals(filterClauses, other.filterClauses); + } + + @Override + protected BoolQueryBuilder doReadFrom(StreamInput in) throws IOException { + BoolQueryBuilder boolQueryBuilder = new BoolQueryBuilder(); + List queryBuilders = readQueries(in); + boolQueryBuilder.mustClauses.addAll(queryBuilders); + queryBuilders = readQueries(in); + boolQueryBuilder.mustNotClauses.addAll(queryBuilders); + queryBuilders = readQueries(in); + boolQueryBuilder.shouldClauses.addAll(queryBuilders); + queryBuilders = readQueries(in); + boolQueryBuilder.filterClauses.addAll(queryBuilders); + boolQueryBuilder.adjustPureNegative = in.readBoolean(); + boolQueryBuilder.disableCoord = in.readBoolean(); + boolQueryBuilder.minimumShouldMatch = in.readOptionalString(); + return boolQueryBuilder; + + } + + @Override + protected void doWriteTo(StreamOutput out) throws IOException { + writeQueries(out, mustClauses); + writeQueries(out, mustNotClauses); + writeQueries(out, shouldClauses); + writeQueries(out, filterClauses); + out.writeBoolean(adjustPureNegative); + out.writeBoolean(disableCoord); + out.writeOptionalString(minimumShouldMatch); + } } diff --git a/core/src/main/java/org/elasticsearch/index/query/BoolQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/BoolQueryParser.java index 9c10acf6a39..13b5f509084 100644 --- a/core/src/main/java/org/elasticsearch/index/query/BoolQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/BoolQueryParser.java @@ -19,13 +19,9 @@ package org.elasticsearch.index.query; -import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; -import org.apache.lucene.search.MatchAllDocsQuery; -import org.apache.lucene.search.Query; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.lucene.search.Queries; import 
org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentParser; @@ -33,14 +29,10 @@ import java.io.IOException; import java.util.ArrayList; import java.util.List; -import static org.elasticsearch.common.lucene.search.Queries.fixNegativeQueryIfNeeded; - /** - * + * Parser for bool query */ -public class BoolQueryParser implements QueryParser { - - public static final String NAME = "bool"; +public class BoolQueryParser implements QueryParser { @Inject public BoolQueryParser(Settings settings) { @@ -49,23 +41,27 @@ public class BoolQueryParser implements QueryParser { @Override public String[] names() { - return new String[]{NAME}; + return new String[]{BoolQueryBuilder.NAME}; } @Override - public Query parse(QueryParseContext parseContext) throws IOException, ParsingException { + public BoolQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException, ParsingException { XContentParser parser = parseContext.parser(); - boolean disableCoord = false; - float boost = 1.0f; + boolean disableCoord = BoolQueryBuilder.DISABLE_COORD_DEFAULT; + boolean adjustPureNegative = BoolQueryBuilder.ADJUST_PURE_NEGATIVE_DEFAULT; + float boost = AbstractQueryBuilder.DEFAULT_BOOST; String minimumShouldMatch = null; - List clauses = new ArrayList<>(); - boolean adjustPureNegative = true; + final List mustClauses = new ArrayList<>(); + final List mustNotClauses = new ArrayList<>(); + final List shouldClauses = new ArrayList<>(); + final List filterClauses = new ArrayList<>(); String queryName = null; - + String currentFieldName = null; XContentParser.Token token; + QueryBuilder query; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); @@ -74,69 +70,47 @@ public class BoolQueryParser implements QueryParser { } else if (token == XContentParser.Token.START_OBJECT) { switch (currentFieldName) { case "must": - Query query = 
parseContext.parseInnerQuery(); - if (query != null) { - clauses.add(new BooleanClause(query, BooleanClause.Occur.MUST)); - } + query = parseContext.parseInnerQueryBuilder(); + mustClauses.add(query); break; case "should": - query = parseContext.parseInnerQuery(); - if (query != null) { - clauses.add(new BooleanClause(query, BooleanClause.Occur.SHOULD)); - if (parseContext.isFilter() && minimumShouldMatch == null) { - minimumShouldMatch = "1"; - } - } + query = parseContext.parseInnerQueryBuilder(); + shouldClauses.add(query); break; case "filter": - query = parseContext.parseInnerFilter(); - if (query != null) { - clauses.add(new BooleanClause(query, BooleanClause.Occur.FILTER)); - } + query = parseContext.parseInnerQueryBuilder(); + filterClauses.add(query); break; case "must_not": case "mustNot": - query = parseContext.parseInnerFilter(); - if (query != null) { - clauses.add(new BooleanClause(query, BooleanClause.Occur.MUST_NOT)); - } + query = parseContext.parseInnerQueryBuilder(); + mustNotClauses.add(query); break; default: - throw new ParsingException(parseContext, "[bool] query does not support [" + currentFieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), "[bool] query does not support [" + currentFieldName + "]"); } } else if (token == XContentParser.Token.START_ARRAY) { while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { switch (currentFieldName) { case "must": - Query query = parseContext.parseInnerQuery(); - if (query != null) { - clauses.add(new BooleanClause(query, BooleanClause.Occur.MUST)); - } + query = parseContext.parseInnerQueryBuilder(); + mustClauses.add(query); break; case "should": - query = parseContext.parseInnerQuery(); - if (query != null) { - clauses.add(new BooleanClause(query, BooleanClause.Occur.SHOULD)); - if (parseContext.isFilter() && minimumShouldMatch == null) { - minimumShouldMatch = "1"; - } - } + query = parseContext.parseInnerQueryBuilder(); + shouldClauses.add(query); break; case 
"filter": - query = parseContext.parseInnerFilter(); - if (query != null) { - clauses.add(new BooleanClause(query, BooleanClause.Occur.FILTER)); - } + query = parseContext.parseInnerQueryBuilder(); + filterClauses.add(query); break; case "must_not": case "mustNot": - query = parseContext.parseInnerFilter(); - if (query != null) { - clauses.add(new BooleanClause(query, BooleanClause.Occur.MUST_NOT)); - } + query = parseContext.parseInnerQueryBuilder(); + mustNotClauses.add(query); break; default: - throw new ParsingException(parseContext, "bool query does not support [" + currentFieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), "bool query does not support [" + currentFieldName + "]"); } } } else if (token.isValue()) { @@ -153,27 +127,33 @@ public class BoolQueryParser implements QueryParser { } else if ("_name".equals(currentFieldName)) { queryName = parser.text(); } else { - throw new ParsingException(parseContext, "[bool] query does not support [" + currentFieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), "[bool] query does not support [" + currentFieldName + "]"); } } } + BoolQueryBuilder boolQuery = new BoolQueryBuilder(); + for (QueryBuilder queryBuilder : mustClauses) { + boolQuery.must(queryBuilder); + } + for (QueryBuilder queryBuilder : mustNotClauses) { + boolQuery.mustNot(queryBuilder); + } + for (QueryBuilder queryBuilder : shouldClauses) { + boolQuery.should(queryBuilder); + } + for (QueryBuilder queryBuilder : filterClauses) { + boolQuery.filter(queryBuilder); + } + boolQuery.boost(boost); + boolQuery.disableCoord(disableCoord); + boolQuery.adjustPureNegative(adjustPureNegative); + boolQuery.minimumNumberShouldMatch(minimumShouldMatch); + boolQuery.queryName(queryName); + return boolQuery; + } - if (clauses.isEmpty()) { - return new MatchAllDocsQuery(); - } - - BooleanQuery.Builder booleanQueryBuilder = new BooleanQuery.Builder(); - booleanQueryBuilder.setDisableCoord(disableCoord); - for (BooleanClause 
clause : clauses) { - booleanQueryBuilder.add(clause); - } - BooleanQuery booleanQuery = booleanQueryBuilder.build(); - booleanQuery.setBoost(boost); - booleanQuery = Queries.applyMinimumShouldMatch(booleanQuery, minimumShouldMatch); - Query query = adjustPureNegative ? fixNegativeQueryIfNeeded(booleanQuery) : booleanQuery; - if (queryName != null) { - parseContext.addNamedQuery(queryName, query); - } - return query; + @Override + public BoolQueryBuilder getBuilderPrototype() { + return BoolQueryBuilder.PROTOTYPE; } } diff --git a/core/src/main/java/org/elasticsearch/index/query/BoostableQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/BoostableQueryBuilder.java deleted file mode 100644 index 31572ce54a8..00000000000 --- a/core/src/main/java/org/elasticsearch/index/query/BoostableQueryBuilder.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.index.query; - -/** - * Query builder which allow setting some boost - */ -public interface BoostableQueryBuilder> { - - /** - * Sets the boost for this query. Documents matching this query will (in addition to the normal - * weightings) have their score multiplied by the boost provided. 
- */ - B boost(float boost); - -} diff --git a/core/src/main/java/org/elasticsearch/index/query/BoostingQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/BoostingQueryBuilder.java index 1e2f9c4d000..c1994a6033e 100644 --- a/core/src/main/java/org/elasticsearch/index/query/BoostingQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/BoostingQueryBuilder.java @@ -19,9 +19,14 @@ package org.elasticsearch.index.query; +import org.apache.lucene.search.Query; +import org.apache.lucene.queries.BoostingQuery; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import java.io.IOException; +import java.util.Objects; /** * The BoostingQuery class can be used to effectively demote results that match a given query. @@ -35,63 +40,122 @@ import java.io.IOException; * multiplied by the supplied "boost" parameter, so this should be less than 1 to achieve a * demoting effect */ -public class BoostingQueryBuilder extends QueryBuilder implements BoostableQueryBuilder { +public class BoostingQueryBuilder extends AbstractQueryBuilder { - private QueryBuilder positiveQuery; + public static final String NAME = "boosting"; - private QueryBuilder negativeQuery; + private final QueryBuilder positiveQuery; + + private final QueryBuilder negativeQuery; private float negativeBoost = -1; - private float boost = -1; + static final BoostingQueryBuilder PROTOTYPE = new BoostingQueryBuilder(EmptyQueryBuilder.PROTOTYPE, EmptyQueryBuilder.PROTOTYPE); - public BoostingQueryBuilder() { - - } - - public BoostingQueryBuilder positive(QueryBuilder positiveQuery) { + /** + * Create a new {@link BoostingQueryBuilder} + * + * @param positiveQuery the positive query for this boosting query. + * @param negativeQuery the negative query for this boosting query. 
+ */ + public BoostingQueryBuilder(QueryBuilder positiveQuery, QueryBuilder negativeQuery) { + if (positiveQuery == null) { + throw new IllegalArgumentException("inner clause [positive] cannot be null."); + } + if (negativeQuery == null) { + throw new IllegalArgumentException("inner clause [negative] cannot be null."); + } this.positiveQuery = positiveQuery; - return this; - } - - public BoostingQueryBuilder negative(QueryBuilder negativeQuery) { this.negativeQuery = negativeQuery; - return this; } + /** + * Get the positive query for this boosting query. + */ + public QueryBuilder positiveQuery() { + return this.positiveQuery; + } + + /** + * Get the negative query for this boosting query. + */ + public QueryBuilder negativeQuery() { + return this.negativeQuery; + } + + /** + * Set the negative boost factor. + */ public BoostingQueryBuilder negativeBoost(float negativeBoost) { + if (negativeBoost < 0) { + throw new IllegalArgumentException("query requires negativeBoost to be set to positive value"); + } this.negativeBoost = negativeBoost; return this; } - @Override - public BoostingQueryBuilder boost(float boost) { - this.boost = boost; - return this; + /** + * Get the negative boost factor. 
+ */ + public float negativeBoost() { + return this.negativeBoost; } @Override protected void doXContent(XContentBuilder builder, Params params) throws IOException { - if (positiveQuery == null) { - throw new IllegalArgumentException("boosting query requires positive query to be set"); - } - if (negativeQuery == null) { - throw new IllegalArgumentException("boosting query requires negative query to be set"); - } - if (negativeBoost == -1) { - throw new IllegalArgumentException("boosting query requires negativeBoost to be set"); - } - builder.startObject(BoostingQueryParser.NAME); + builder.startObject(NAME); builder.field("positive"); positiveQuery.toXContent(builder, params); builder.field("negative"); negativeQuery.toXContent(builder, params); - builder.field("negative_boost", negativeBoost); - - if (boost != -1) { - builder.field("boost", boost); - } + printBoostAndQueryName(builder); builder.endObject(); } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + protected Query doToQuery(QueryShardContext context) throws IOException { + Query positive = positiveQuery.toQuery(context); + Query negative = negativeQuery.toQuery(context); + // make upstream queries ignore this query by returning `null` + // if either inner query builder returns null + if (positive == null || negative == null) { + return null; + } + + return new BoostingQuery(positive, negative, negativeBoost); + } + + @Override + protected int doHashCode() { + return Objects.hash(negativeBoost, positiveQuery, negativeQuery); + } + + @Override + protected boolean doEquals(BoostingQueryBuilder other) { + return Objects.equals(negativeBoost, other.negativeBoost) && + Objects.equals(positiveQuery, other.positiveQuery) && + Objects.equals(negativeQuery, other.negativeQuery); + } + + @Override + protected BoostingQueryBuilder doReadFrom(StreamInput in) throws IOException { + QueryBuilder positiveQuery = in.readQuery(); + QueryBuilder negativeQuery = in.readQuery(); + 
BoostingQueryBuilder boostingQuery = new BoostingQueryBuilder(positiveQuery, negativeQuery); + boostingQuery.negativeBoost = in.readFloat(); + return boostingQuery; + } + + @Override + protected void doWriteTo(StreamOutput out) throws IOException { + out.writeQuery(positiveQuery); + out.writeQuery(negativeQuery); + out.writeFloat(negativeBoost); + } } diff --git a/core/src/main/java/org/elasticsearch/index/query/BoostingQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/BoostingQueryParser.java index b496d2bfd8d..81b32dd1635 100644 --- a/core/src/main/java/org/elasticsearch/index/query/BoostingQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/BoostingQueryParser.java @@ -19,40 +19,32 @@ package org.elasticsearch.index.query; -import org.apache.lucene.queries.BoostingQuery; -import org.apache.lucene.search.Query; import org.elasticsearch.common.ParsingException; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; /** - * + * Parser for boosting query */ -public class BoostingQueryParser implements QueryParser { - - public static final String NAME = "boosting"; - - @Inject - public BoostingQueryParser() { - } +public class BoostingQueryParser implements QueryParser { @Override public String[] names() { - return new String[]{NAME}; + return new String[]{BoostingQueryBuilder.NAME}; } @Override - public Query parse(QueryParseContext parseContext) throws IOException, ParsingException { + public BoostingQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException { XContentParser parser = parseContext.parser(); - Query positiveQuery = null; + QueryBuilder positiveQuery = null; boolean positiveQueryFound = false; - Query negativeQuery = null; + QueryBuilder negativeQuery = null; boolean negativeQueryFound = false; - float boost = -1; + float boost = AbstractQueryBuilder.DEFAULT_BOOST; float negativeBoost = -1; + String queryName = null; 
String currentFieldName = null; XContentParser.Token token; @@ -61,44 +53,46 @@ public class BoostingQueryParser implements QueryParser { currentFieldName = parser.currentName(); } else if (token == XContentParser.Token.START_OBJECT) { if ("positive".equals(currentFieldName)) { - positiveQuery = parseContext.parseInnerQuery(); + positiveQuery = parseContext.parseInnerQueryBuilder(); positiveQueryFound = true; } else if ("negative".equals(currentFieldName)) { - negativeQuery = parseContext.parseInnerQuery(); + negativeQuery = parseContext.parseInnerQueryBuilder(); negativeQueryFound = true; } else { - throw new ParsingException(parseContext, "[boosting] query does not support [" + currentFieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), "[boosting] query does not support [" + currentFieldName + "]"); } } else if (token.isValue()) { if ("negative_boost".equals(currentFieldName) || "negativeBoost".equals(currentFieldName)) { negativeBoost = parser.floatValue(); + } else if ("_name".equals(currentFieldName)) { + queryName = parser.text(); } else if ("boost".equals(currentFieldName)) { boost = parser.floatValue(); } else { - throw new ParsingException(parseContext, "[boosting] query does not support [" + currentFieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), "[boosting] query does not support [" + currentFieldName + "]"); } } } - if (positiveQuery == null && !positiveQueryFound) { - throw new ParsingException(parseContext, "[boosting] query requires 'positive' query to be set'"); + if (!positiveQueryFound) { + throw new ParsingException(parser.getTokenLocation(), "[boosting] query requires 'positive' query to be set'"); } - if (negativeQuery == null && !negativeQueryFound) { - throw new ParsingException(parseContext, "[boosting] query requires 'negative' query to be set'"); + if (!negativeQueryFound) { + throw new ParsingException(parser.getTokenLocation(), "[boosting] query requires 'negative' query to be set'"); } - if 
(negativeBoost == -1) { - throw new ParsingException(parseContext, "[boosting] query requires 'negative_boost' to be set'"); + if (negativeBoost < 0) { + throw new ParsingException(parser.getTokenLocation(), "[boosting] query requires 'negative_boost' to be set to be a positive value'"); } - // parsers returned null - if (positiveQuery == null || negativeQuery == null) { - return null; - } - - BoostingQuery boostingQuery = new BoostingQuery(positiveQuery, negativeQuery, negativeBoost); - if (boost != -1) { - boostingQuery.setBoost(boost); - } + BoostingQueryBuilder boostingQuery = new BoostingQueryBuilder(positiveQuery, negativeQuery); + boostingQuery.negativeBoost(negativeBoost); + boostingQuery.boost(boost); + boostingQuery.queryName(queryName); return boostingQuery; } + + @Override + public BoostingQueryBuilder getBuilderPrototype() { + return BoostingQueryBuilder.PROTOTYPE; + } } \ No newline at end of file diff --git a/core/src/main/java/org/elasticsearch/index/query/CommonTermsQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/CommonTermsQueryBuilder.java index 32b74d0c09f..c6b82535408 100644 --- a/core/src/main/java/org/elasticsearch/index/query/CommonTermsQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/CommonTermsQueryBuilder.java @@ -19,12 +19,24 @@ package org.elasticsearch.index.query; +import org.apache.lucene.analysis.Analyzer; +import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; import org.apache.lucene.index.Term; +import org.apache.lucene.queries.ExtendedCommonTermsQuery; +import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.Query; import org.apache.lucene.search.similarities.Similarity; +import org.apache.lucene.util.BytesRefBuilder; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import 
org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.mapper.MappedFieldType; import java.io.IOException; +import java.util.Objects; /** * CommonTermsQuery query is a query that executes high-frequency terms in a @@ -41,46 +53,58 @@ import java.io.IOException; * low-frequency terms are matched such that this query can improve query * execution times significantly if applicable. */ -public class CommonTermsQueryBuilder extends QueryBuilder implements BoostableQueryBuilder { +public class CommonTermsQueryBuilder extends AbstractQueryBuilder { - public static enum Operator { - OR, AND - } + public static final String NAME = "common"; - private final String name; + public static final float DEFAULT_CUTOFF_FREQ = 0.01f; + + public static final Operator DEFAULT_HIGH_FREQ_OCCUR = Operator.OR; + + public static final Operator DEFAULT_LOW_FREQ_OCCUR = Operator.OR; + + public static final boolean DEFAULT_DISABLE_COORD = true; + + private final String fieldName; private final Object text; - private Operator highFreqOperator = null; + private Operator highFreqOperator = DEFAULT_HIGH_FREQ_OCCUR; - private Operator lowFreqOperator = null; + private Operator lowFreqOperator = DEFAULT_LOW_FREQ_OCCUR; private String analyzer = null; - private Float boost = null; - private String lowFreqMinimumShouldMatch = null; private String highFreqMinimumShouldMatch = null; - private Boolean disableCoord = null; + private boolean disableCoord = DEFAULT_DISABLE_COORD; - private Float cutoffFrequency = null; + private float cutoffFrequency = DEFAULT_CUTOFF_FREQ; - private String queryName; + static final CommonTermsQueryBuilder PROTOTYPE = new CommonTermsQueryBuilder("field", "text"); /** * Constructs a new common terms query. 
*/ - public CommonTermsQueryBuilder(String name, Object text) { - if (name == null) { - throw new IllegalArgumentException("Field name must not be null"); + public CommonTermsQueryBuilder(String fieldName, Object text) { + if (Strings.isEmpty(fieldName)) { + throw new IllegalArgumentException("field name is null or empty"); } if (text == null) { - throw new IllegalArgumentException("Query must not be null"); + throw new IllegalArgumentException("text cannot be null."); } + this.fieldName = fieldName; this.text = text; - this.name = name; + } + + public String fieldName() { + return this.fieldName; + } + + public Object value() { + return this.text; } /** @@ -89,19 +113,27 @@ public class CommonTermsQueryBuilder extends QueryBuilder implements BoostableQu * AND. */ public CommonTermsQueryBuilder highFreqOperator(Operator operator) { - this.highFreqOperator = operator; + this.highFreqOperator = (operator == null) ? DEFAULT_HIGH_FREQ_OCCUR : operator; return this; } + public Operator highFreqOperator() { + return highFreqOperator; + } + /** * Sets the operator to use for terms with a low document frequency (less * than {@link #cutoffFrequency(float)}. Defaults to AND. */ public CommonTermsQueryBuilder lowFreqOperator(Operator operator) { - this.lowFreqOperator = operator; + this.lowFreqOperator = (operator == null) ? DEFAULT_LOW_FREQ_OCCUR : operator; return this; } + public Operator lowFreqOperator() { + return lowFreqOperator; + } + /** * Explicitly set the analyzer to use. Defaults to use explicit mapping * config for the field, or, if not set, the default search analyzer. @@ -111,13 +143,8 @@ public class CommonTermsQueryBuilder extends QueryBuilder implements BoostableQu return this; } - /** - * Set the boost to apply to the query. 
- */ - @Override - public CommonTermsQueryBuilder boost(float boost) { - this.boost = boost; - return this; + public String analyzer() { + return this.analyzer; } /** @@ -125,13 +152,17 @@ public class CommonTermsQueryBuilder extends QueryBuilder implements BoostableQu * in [0..1] (or absolute number >=1) representing the maximum threshold of * a terms document frequency to be considered a low frequency term. * Defaults to - * {@value CommonTermsQueryParser#DEFAULT_MAX_TERM_DOC_FREQ} + * {@value #DEFAULT_CUTOFF_FREQ} */ public CommonTermsQueryBuilder cutoffFrequency(float cutoffFrequency) { this.cutoffFrequency = cutoffFrequency; return this; } + public float cutoffFrequency() { + return this.cutoffFrequency; + } + /** * Sets the minimum number of high frequent query terms that need to match in order to * produce a hit when there are no low frequen terms. @@ -141,6 +172,10 @@ public class CommonTermsQueryBuilder extends QueryBuilder implements BoostableQu return this; } + public String highFreqMinimumShouldMatch() { + return this.highFreqMinimumShouldMatch; + } + /** * Sets the minimum number of low frequent query terms that need to match in order to * produce a hit. @@ -149,44 +184,32 @@ public class CommonTermsQueryBuilder extends QueryBuilder implements BoostableQu this.lowFreqMinimumShouldMatch = lowFreqMinimumShouldMatch; return this; } - + + public String lowFreqMinimumShouldMatch() { + return this.lowFreqMinimumShouldMatch; + } + public CommonTermsQueryBuilder disableCoord(boolean disableCoord) { this.disableCoord = disableCoord; return this; } - /** - * Sets the query name for the filter that can be used when searching for matched_filters per hit. 
- */ - public CommonTermsQueryBuilder queryName(String queryName) { - this.queryName = queryName; - return this; + public boolean disableCoord() { + return this.disableCoord; } @Override - public void doXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(CommonTermsQueryParser.NAME); - builder.startObject(name); - + protected void doXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(NAME); + builder.startObject(fieldName); builder.field("query", text); - if (disableCoord != null) { - builder.field("disable_coord", disableCoord); - } - if (highFreqOperator != null) { - builder.field("high_freq_operator", highFreqOperator.toString()); - } - if (lowFreqOperator != null) { - builder.field("low_freq_operator", lowFreqOperator.toString()); - } + builder.field("disable_coord", disableCoord); + builder.field("high_freq_operator", highFreqOperator.toString()); + builder.field("low_freq_operator", lowFreqOperator.toString()); if (analyzer != null) { builder.field("analyzer", analyzer); } - if (boost != null) { - builder.field("boost", boost); - } - if (cutoffFrequency != null) { - builder.field("cutoff_frequency", cutoffFrequency); - } + builder.field("cutoff_frequency", cutoffFrequency); if (lowFreqMinimumShouldMatch != null || highFreqMinimumShouldMatch != null) { builder.startObject("minimum_should_match"); if (lowFreqMinimumShouldMatch != null) { @@ -197,11 +220,113 @@ public class CommonTermsQueryBuilder extends QueryBuilder implements BoostableQu } builder.endObject(); } - if (queryName != null) { - builder.field("_name", queryName); - } - + printBoostAndQueryName(builder); builder.endObject(); builder.endObject(); } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + protected Query doToQuery(QueryShardContext context) throws IOException { + String field; + MappedFieldType fieldType = context.fieldMapper(fieldName); + if (fieldType != null) { + field = 
fieldType.names().indexName(); + } else { + field = fieldName; + } + + Analyzer analyzerObj; + if (analyzer == null) { + if (fieldType != null) { + analyzerObj = context.getSearchAnalyzer(fieldType); + } else { + analyzerObj = context.mapperService().searchAnalyzer(); + } + } else { + analyzerObj = context.mapperService().analysisService().analyzer(analyzer); + if (analyzerObj == null) { + throw new QueryShardException(context, "[common] analyzer [" + analyzer + "] not found"); + } + } + + Occur highFreqOccur = highFreqOperator.toBooleanClauseOccur(); + Occur lowFreqOccur = lowFreqOperator.toBooleanClauseOccur(); + + ExtendedCommonTermsQuery commonsQuery = new ExtendedCommonTermsQuery(highFreqOccur, lowFreqOccur, cutoffFrequency, disableCoord, fieldType); + return parseQueryString(commonsQuery, text, field, analyzerObj, lowFreqMinimumShouldMatch, highFreqMinimumShouldMatch); + } + + static Query parseQueryString(ExtendedCommonTermsQuery query, Object queryString, String field, Analyzer analyzer, + String lowFreqMinimumShouldMatch, String highFreqMinimumShouldMatch) throws IOException { + // Logic similar to QueryParser#getFieldQuery + int count = 0; + try (TokenStream source = analyzer.tokenStream(field, queryString.toString())) { + source.reset(); + CharTermAttribute termAtt = source.addAttribute(CharTermAttribute.class); + BytesRefBuilder builder = new BytesRefBuilder(); + while (source.incrementToken()) { + // UTF-8 + builder.copyChars(termAtt); + query.add(new Term(field, builder.toBytesRef())); + count++; + } + } + + if (count == 0) { + return null; + } + query.setLowFreqMinimumNumberShouldMatch(lowFreqMinimumShouldMatch); + query.setHighFreqMinimumNumberShouldMatch(highFreqMinimumShouldMatch); + return query; + } + + @Override + protected CommonTermsQueryBuilder doReadFrom(StreamInput in) throws IOException { + CommonTermsQueryBuilder commonTermsQueryBuilder = new CommonTermsQueryBuilder(in.readString(), in.readGenericValue()); + 
commonTermsQueryBuilder.highFreqOperator = Operator.readOperatorFrom(in); + commonTermsQueryBuilder.lowFreqOperator = Operator.readOperatorFrom(in); + commonTermsQueryBuilder.analyzer = in.readOptionalString(); + commonTermsQueryBuilder.lowFreqMinimumShouldMatch = in.readOptionalString(); + commonTermsQueryBuilder.highFreqMinimumShouldMatch = in.readOptionalString(); + commonTermsQueryBuilder.disableCoord = in.readBoolean(); + commonTermsQueryBuilder.cutoffFrequency = in.readFloat(); + return commonTermsQueryBuilder; + } + + @Override + protected void doWriteTo(StreamOutput out) throws IOException { + out.writeString(this.fieldName); + out.writeGenericValue(this.text); + highFreqOperator.writeTo(out); + lowFreqOperator.writeTo(out); + out.writeOptionalString(analyzer); + out.writeOptionalString(lowFreqMinimumShouldMatch); + out.writeOptionalString(highFreqMinimumShouldMatch); + out.writeBoolean(disableCoord); + out.writeFloat(cutoffFrequency); + } + + @Override + protected int doHashCode() { + return Objects.hash(fieldName, text, highFreqOperator, lowFreqOperator, analyzer, + lowFreqMinimumShouldMatch, highFreqMinimumShouldMatch, disableCoord, cutoffFrequency); + } + + @Override + protected boolean doEquals(CommonTermsQueryBuilder other) { + return Objects.equals(fieldName, other.fieldName) && + Objects.equals(text, other.text) && + Objects.equals(highFreqOperator, other.highFreqOperator) && + Objects.equals(lowFreqOperator, other.lowFreqOperator) && + Objects.equals(analyzer, other.analyzer) && + Objects.equals(lowFreqMinimumShouldMatch, other.lowFreqMinimumShouldMatch) && + Objects.equals(highFreqMinimumShouldMatch, other.highFreqMinimumShouldMatch) && + Objects.equals(disableCoord, other.disableCoord) && + Objects.equals(cutoffFrequency, other.cutoffFrequency); + } } diff --git a/core/src/main/java/org/elasticsearch/index/query/CommonTermsQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/CommonTermsQueryParser.java index 
e59d303018e..86de4e31129 100644 --- a/core/src/main/java/org/elasticsearch/index/query/CommonTermsQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/CommonTermsQueryParser.java @@ -19,64 +19,38 @@ package org.elasticsearch.index.query; -import org.apache.lucene.analysis.Analyzer; -import org.apache.lucene.analysis.TokenStream; -import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; -import org.apache.lucene.index.Term; -import org.apache.lucene.queries.ExtendedCommonTermsQuery; -import org.apache.lucene.search.BooleanClause; -import org.apache.lucene.search.BooleanClause.Occur; -import org.apache.lucene.search.Query; -import org.apache.lucene.util.BytesRefBuilder; import org.elasticsearch.common.ParsingException; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.mapper.MappedFieldType; import java.io.IOException; /** - * + * Parser for common terms query */ -public class CommonTermsQueryParser implements QueryParser { - - public static final String NAME = "common"; - - static final float DEFAULT_MAX_TERM_DOC_FREQ = 0.01f; - - static final Occur DEFAULT_HIGH_FREQ_OCCUR = Occur.SHOULD; - - static final Occur DEFAULT_LOW_FREQ_OCCUR = Occur.SHOULD; - - static final boolean DEFAULT_DISABLE_COORD = true; - - - @Inject - public CommonTermsQueryParser() { - } +public class CommonTermsQueryParser implements QueryParser { @Override public String[] names() { - return new String[] { NAME }; + return new String[] { CommonTermsQueryBuilder.NAME }; } @Override - public Query parse(QueryParseContext parseContext) throws IOException, ParsingException { + public CommonTermsQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException { XContentParser parser = parseContext.parser(); XContentParser.Token token = parser.nextToken(); if (token != XContentParser.Token.FIELD_NAME) { - throw new ParsingException(parseContext, "[common] query malformed, no 
field"); + throw new ParsingException(parser.getTokenLocation(), "[common] query malformed, no field"); } String fieldName = parser.currentName(); - Object value = null; - float boost = 1.0f; - String queryAnalyzer = null; + Object text = null; + float boost = AbstractQueryBuilder.DEFAULT_BOOST; + String analyzer = null; String lowFreqMinimumShouldMatch = null; String highFreqMinimumShouldMatch = null; - boolean disableCoord = DEFAULT_DISABLE_COORD; - Occur highFreqOccur = DEFAULT_HIGH_FREQ_OCCUR; - Occur lowFreqOccur = DEFAULT_LOW_FREQ_OCCUR; - float maxTermFrequency = DEFAULT_MAX_TERM_DOC_FREQ; + boolean disableCoord = CommonTermsQueryBuilder.DEFAULT_DISABLE_COORD; + Operator highFreqOperator = CommonTermsQueryBuilder.DEFAULT_HIGH_FREQ_OCCUR; + Operator lowFreqOperator = CommonTermsQueryBuilder.DEFAULT_LOW_FREQ_OCCUR; + float cutoffFrequency = CommonTermsQueryBuilder.DEFAULT_CUTOFF_FREQ; String queryName = null; token = parser.nextToken(); if (token == XContentParser.Token.START_OBJECT) { @@ -96,130 +70,66 @@ public class CommonTermsQueryParser implements QueryParser { } else if ("high_freq".equals(innerFieldName) || "highFreq".equals(innerFieldName)) { highFreqMinimumShouldMatch = parser.text(); } else { - throw new ParsingException(parseContext, "[common] query does not support [" + innerFieldName + throw new ParsingException(parser.getTokenLocation(), "[common] query does not support [" + innerFieldName + "] for [" + currentFieldName + "]"); } } } } else { - throw new ParsingException(parseContext, "[common] query does not support [" + currentFieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), "[common] query does not support [" + currentFieldName + "]"); } } else if (token.isValue()) { if ("query".equals(currentFieldName)) { - value = parser.objectText(); + text = parser.objectText(); } else if ("analyzer".equals(currentFieldName)) { - String analyzer = parser.text(); - if (parseContext.analysisService().analyzer(analyzer) == null) { - 
throw new ParsingException(parseContext, "[common] analyzer [" + parser.text() + "] not found"); - } - queryAnalyzer = analyzer; + analyzer = parser.text(); } else if ("disable_coord".equals(currentFieldName) || "disableCoord".equals(currentFieldName)) { disableCoord = parser.booleanValue(); } else if ("boost".equals(currentFieldName)) { boost = parser.floatValue(); } else if ("high_freq_operator".equals(currentFieldName) || "highFreqOperator".equals(currentFieldName)) { - String op = parser.text(); - if ("or".equalsIgnoreCase(op)) { - highFreqOccur = BooleanClause.Occur.SHOULD; - } else if ("and".equalsIgnoreCase(op)) { - highFreqOccur = BooleanClause.Occur.MUST; - } else { - throw new ParsingException(parseContext, - "[common] query requires operator to be either 'and' or 'or', not [" + op + "]"); - } + highFreqOperator = Operator.fromString(parser.text()); } else if ("low_freq_operator".equals(currentFieldName) || "lowFreqOperator".equals(currentFieldName)) { - String op = parser.text(); - if ("or".equalsIgnoreCase(op)) { - lowFreqOccur = BooleanClause.Occur.SHOULD; - } else if ("and".equalsIgnoreCase(op)) { - lowFreqOccur = BooleanClause.Occur.MUST; - } else { - throw new ParsingException(parseContext, - "[common] query requires operator to be either 'and' or 'or', not [" + op + "]"); - } + lowFreqOperator = Operator.fromString(parser.text()); } else if ("minimum_should_match".equals(currentFieldName) || "minimumShouldMatch".equals(currentFieldName)) { lowFreqMinimumShouldMatch = parser.text(); } else if ("cutoff_frequency".equals(currentFieldName)) { - maxTermFrequency = parser.floatValue(); + cutoffFrequency = parser.floatValue(); } else if ("_name".equals(currentFieldName)) { queryName = parser.text(); } else { - throw new ParsingException(parseContext, "[common] query does not support [" + currentFieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), "[common] query does not support [" + currentFieldName + "]"); } } } parser.nextToken(); 
} else { - value = parser.objectText(); + text = parser.objectText(); // move to the next token token = parser.nextToken(); if (token != XContentParser.Token.END_OBJECT) { - throw new ParsingException( - parseContext, + throw new ParsingException(parser.getTokenLocation(), "[common] query parsed in simplified form, with direct field name, but included more options than just the field name, possibly use its 'options' form, with 'query' element?"); } } - if (value == null) { - throw new ParsingException(parseContext, "No text specified for text query"); + if (text == null) { + throw new ParsingException(parser.getTokenLocation(), "No text specified for text query"); } - String field; - MappedFieldType fieldType = parseContext.fieldMapper(fieldName); - if (fieldType != null) { - field = fieldType.names().indexName(); - } else { - field = fieldName; - } - - Analyzer analyzer = null; - if (queryAnalyzer == null) { - if (fieldType != null) { - analyzer = fieldType.searchAnalyzer(); - } - if (analyzer == null && fieldType != null) { - analyzer = parseContext.getSearchAnalyzer(fieldType); - } - if (analyzer == null) { - analyzer = parseContext.mapperService().searchAnalyzer(); - } - } else { - analyzer = parseContext.mapperService().analysisService().analyzer(queryAnalyzer); - if (analyzer == null) { - throw new IllegalArgumentException("No analyzer found for [" + queryAnalyzer + "]"); - } - } - - ExtendedCommonTermsQuery commonsQuery = new ExtendedCommonTermsQuery(highFreqOccur, lowFreqOccur, maxTermFrequency, disableCoord, fieldType); - commonsQuery.setBoost(boost); - Query query = parseQueryString(commonsQuery, value.toString(), field, parseContext, analyzer, lowFreqMinimumShouldMatch, highFreqMinimumShouldMatch); - if (queryName != null) { - parseContext.addNamedQuery(queryName, query); - } - return query; + return new CommonTermsQueryBuilder(fieldName, text) + .lowFreqMinimumShouldMatch(lowFreqMinimumShouldMatch) + 
.highFreqMinimumShouldMatch(highFreqMinimumShouldMatch) + .analyzer(analyzer) + .highFreqOperator(highFreqOperator) + .lowFreqOperator(lowFreqOperator) + .disableCoord(disableCoord) + .cutoffFrequency(cutoffFrequency) + .boost(boost) + .queryName(queryName); } - - private final Query parseQueryString(ExtendedCommonTermsQuery query, String queryString, String field, QueryParseContext parseContext, - Analyzer analyzer, String lowFreqMinimumShouldMatch, String highFreqMinimumShouldMatch) throws IOException { - // Logic similar to QueryParser#getFieldQuery - int count = 0; - try (TokenStream source = analyzer.tokenStream(field, queryString.toString())) { - source.reset(); - CharTermAttribute termAtt = source.addAttribute(CharTermAttribute.class); - BytesRefBuilder builder = new BytesRefBuilder(); - while (source.incrementToken()) { - // UTF-8 - builder.copyChars(termAtt); - query.add(new Term(field, builder.toBytesRef())); - count++; - } - } - - if (count == 0) { - return null; - } - query.setLowFreqMinimumNumberShouldMatch(lowFreqMinimumShouldMatch); - query.setHighFreqMinimumNumberShouldMatch(highFreqMinimumShouldMatch); - return query; + @Override + public CommonTermsQueryBuilder getBuilderPrototype() { + return CommonTermsQueryBuilder.PROTOTYPE; } } diff --git a/core/src/main/java/org/elasticsearch/index/query/ConstantScoreQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/ConstantScoreQueryBuilder.java index bdcbe9cc76c..528871c46af 100644 --- a/core/src/main/java/org/elasticsearch/index/query/ConstantScoreQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/ConstantScoreQueryBuilder.java @@ -19,6 +19,10 @@ package org.elasticsearch.index.query; +import org.apache.lucene.search.ConstantScoreQuery; +import org.apache.lucene.search.Query; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import 
java.io.IOException; @@ -28,41 +32,76 @@ import java.util.Objects; * A query that wraps a filter and simply returns a constant score equal to the * query boost for every document in the filter. */ -public class ConstantScoreQueryBuilder extends QueryBuilder implements BoostableQueryBuilder { +public class ConstantScoreQueryBuilder extends AbstractQueryBuilder { + + public static final String NAME = "constant_score"; private final QueryBuilder filterBuilder; - private float boost = -1; + static final ConstantScoreQueryBuilder PROTOTYPE = new ConstantScoreQueryBuilder(EmptyQueryBuilder.PROTOTYPE); /** - * A query that wraps a query and simply returns a constant score equal to the + * A query that wraps another query and simply returns a constant score equal to the * query boost for every document in the query. * * @param filterBuilder The query to wrap in a constant score query */ public ConstantScoreQueryBuilder(QueryBuilder filterBuilder) { - this.filterBuilder = Objects.requireNonNull(filterBuilder); + if (filterBuilder == null) { + throw new IllegalArgumentException("inner clause [filter] cannot be null."); + } + this.filterBuilder = filterBuilder; } /** - * Sets the boost for this query. Documents matching this query will (in addition to the normal - * weightings) have their score multiplied by the boost provided. 
+ * @return the query that was wrapped in this constant score query */ - @Override - public ConstantScoreQueryBuilder boost(float boost) { - this.boost = boost; - return this; + public QueryBuilder innerQuery() { + return this.filterBuilder; } @Override protected void doXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(ConstantScoreQueryParser.NAME); + builder.startObject(NAME); builder.field("filter"); filterBuilder.toXContent(builder, params); - - if (boost != -1) { - builder.field("boost", boost); - } + printBoostAndQueryName(builder); builder.endObject(); } -} \ No newline at end of file + + @Override + protected Query doToQuery(QueryShardContext context) throws IOException { + Query innerFilter = filterBuilder.toFilter(context); + if (innerFilter == null ) { + // return null so that parent queries (e.g. bool) also ignore this + return null; + } + return new ConstantScoreQuery(innerFilter); + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + protected int doHashCode() { + return Objects.hash(filterBuilder); + } + + @Override + protected boolean doEquals(ConstantScoreQueryBuilder other) { + return Objects.equals(filterBuilder, other.filterBuilder); + } + + @Override + protected ConstantScoreQueryBuilder doReadFrom(StreamInput in) throws IOException { + QueryBuilder innerFilterBuilder = in.readQuery(); + return new ConstantScoreQueryBuilder(innerFilterBuilder); + } + + @Override + protected void doWriteTo(StreamOutput out) throws IOException { + out.writeQuery(filterBuilder); + } +} diff --git a/core/src/main/java/org/elasticsearch/index/query/ConstantScoreQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/ConstantScoreQueryParser.java index 1df3a54489f..8eda81424f2 100644 --- a/core/src/main/java/org/elasticsearch/index/query/ConstantScoreQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/ConstantScoreQueryParser.java @@ -19,40 +19,33 @@ package 
org.elasticsearch.index.query; -import org.apache.lucene.search.ConstantScoreQuery; -import org.apache.lucene.search.Query; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; /** - * + * Parser for constant_score query */ -public class ConstantScoreQueryParser implements QueryParser { +public class ConstantScoreQueryParser implements QueryParser { - public static final String NAME = "constant_score"; private static final ParseField INNER_QUERY_FIELD = new ParseField("filter", "query"); - @Inject - public ConstantScoreQueryParser() { - } - @Override public String[] names() { - return new String[]{NAME, Strings.toCamelCase(NAME)}; + return new String[]{ConstantScoreQueryBuilder.NAME, Strings.toCamelCase(ConstantScoreQueryBuilder.NAME)}; } @Override - public Query parse(QueryParseContext parseContext) throws IOException, ParsingException { + public ConstantScoreQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException { XContentParser parser = parseContext.parser(); - Query filter = null; + QueryBuilder query = null; boolean queryFound = false; - float boost = 1.0f; + String queryName = null; + float boost = AbstractQueryBuilder.DEFAULT_BOOST; String currentFieldName = null; XContentParser.Token token; @@ -63,29 +56,33 @@ public class ConstantScoreQueryParser implements QueryParser { // skip } else if (token == XContentParser.Token.START_OBJECT) { if (parseContext.parseFieldMatcher().match(currentFieldName, INNER_QUERY_FIELD)) { - filter = parseContext.parseInnerFilter(); + query = parseContext.parseInnerQueryBuilder(); queryFound = true; } else { - throw new ParsingException(parseContext, "[constant_score] query does not support [" + currentFieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), "[constant_score] 
query does not support [" + currentFieldName + "]"); } } else if (token.isValue()) { - if ("boost".equals(currentFieldName)) { + if ("_name".equals(currentFieldName)) { + queryName = parser.text(); + } else if ("boost".equals(currentFieldName)) { boost = parser.floatValue(); } else { - throw new ParsingException(parseContext, "[constant_score] query does not support [" + currentFieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), "[constant_score] query does not support [" + currentFieldName + "]"); } } } if (!queryFound) { - throw new ParsingException(parseContext, "[constant_score] requires a 'filter' element"); + throw new ParsingException(parser.getTokenLocation(), "[constant_score] requires a 'filter' element"); } - if (filter == null) { - return null; - } - - filter = new ConstantScoreQuery(filter); - filter.setBoost(boost); - return filter; + ConstantScoreQueryBuilder constantScoreBuilder = new ConstantScoreQueryBuilder(query); + constantScoreBuilder.boost(boost); + constantScoreBuilder.queryName(queryName); + return constantScoreBuilder; } -} \ No newline at end of file + + @Override + public ConstantScoreQueryBuilder getBuilderPrototype() { + return ConstantScoreQueryBuilder.PROTOTYPE; + } +} diff --git a/core/src/main/java/org/elasticsearch/index/query/DisMaxQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/DisMaxQueryBuilder.java index 3724a05df9a..8ce9d4681f4 100644 --- a/core/src/main/java/org/elasticsearch/index/query/DisMaxQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/DisMaxQueryBuilder.java @@ -19,42 +19,51 @@ package org.elasticsearch.index.query; +import org.apache.lucene.search.DisjunctionMaxQuery; +import org.apache.lucene.search.Query; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import java.io.IOException; import java.util.ArrayList; +import 
java.util.Collection; +import java.util.List; +import java.util.Objects; /** * A query that generates the union of documents produced by its sub-queries, and that scores each document * with the maximum score for that document as produced by any sub-query, plus a tie breaking increment for any * additional matching sub-queries. */ -public class DisMaxQueryBuilder extends QueryBuilder implements BoostableQueryBuilder { +public class DisMaxQueryBuilder extends AbstractQueryBuilder { - private ArrayList queries = new ArrayList<>(); + public static final String NAME = "dis_max"; - private float boost = -1; + private final ArrayList queries = new ArrayList<>(); - private float tieBreaker = -1; + /** Default multiplication factor for breaking ties in document scores.*/ + public static float DEFAULT_TIE_BREAKER = 0.0f; + private float tieBreaker = DEFAULT_TIE_BREAKER; - private String queryName; + static final DisMaxQueryBuilder PROTOTYPE = new DisMaxQueryBuilder(); /** * Add a sub-query to this disjunction. */ public DisMaxQueryBuilder add(QueryBuilder queryBuilder) { + if (queryBuilder == null) { + throw new IllegalArgumentException("inner dismax query clause cannot be null"); + } queries.add(queryBuilder); return this; } /** - * Sets the boost for this query. Documents matching this query will (in addition to the normal - * weightings) have their score multiplied by the boost provided. + * @return an immutable list copy of the current sub-queries of this disjunction */ - @Override - public DisMaxQueryBuilder boost(float boost) { - this.boost = boost; - return this; + public List innerQueries() { + return this.queries; } /** @@ -69,30 +78,65 @@ public class DisMaxQueryBuilder extends QueryBuilder implements BoostableQueryBu } /** - * Sets the query name for the filter that can be used when searching for matched_filters per hit. 
+ * @return the tie breaker score + * @see DisMaxQueryBuilder#tieBreaker(float) */ - public DisMaxQueryBuilder queryName(String queryName) { - this.queryName = queryName; - return this; + public float tieBreaker() { + return this.tieBreaker; } @Override protected void doXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(DisMaxQueryParser.NAME); - if (tieBreaker != -1) { - builder.field("tie_breaker", tieBreaker); - } - if (boost != -1) { - builder.field("boost", boost); - } - if (queryName != null) { - builder.field("_name", queryName); - } + builder.startObject(NAME); + builder.field("tie_breaker", tieBreaker); builder.startArray("queries"); for (QueryBuilder queryBuilder : queries) { queryBuilder.toXContent(builder, params); } builder.endArray(); + printBoostAndQueryName(builder); builder.endObject(); } -} \ No newline at end of file + + @Override + protected Query doToQuery(QueryShardContext context) throws IOException { + // return null if there are no queries at all + Collection luceneQueries = toQueries(queries, context); + if (luceneQueries.isEmpty()) { + return null; + } + + return new DisjunctionMaxQuery(luceneQueries, tieBreaker); + } + + @Override + protected DisMaxQueryBuilder doReadFrom(StreamInput in) throws IOException { + DisMaxQueryBuilder disMax = new DisMaxQueryBuilder(); + List queryBuilders = readQueries(in); + disMax.queries.addAll(queryBuilders); + disMax.tieBreaker = in.readFloat(); + return disMax; + } + + @Override + protected void doWriteTo(StreamOutput out) throws IOException { + writeQueries(out, queries); + out.writeFloat(tieBreaker); + } + + @Override + protected int doHashCode() { + return Objects.hash(queries, tieBreaker); + } + + @Override + protected boolean doEquals(DisMaxQueryBuilder other) { + return Objects.equals(queries, other.queries) && + Objects.equals(tieBreaker, other.tieBreaker); + } + + @Override + public String getWriteableName() { + return NAME; + } +} diff --git 
a/core/src/main/java/org/elasticsearch/index/query/DisMaxQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/DisMaxQueryParser.java index e9780673cf7..a280cdfd837 100644 --- a/core/src/main/java/org/elasticsearch/index/query/DisMaxQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/DisMaxQueryParser.java @@ -19,11 +19,8 @@ package org.elasticsearch.index.query; -import org.apache.lucene.search.DisjunctionMaxQuery; -import org.apache.lucene.search.Query; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; @@ -31,29 +28,23 @@ import java.util.ArrayList; import java.util.List; /** - * + * Parser for dis_max query */ -public class DisMaxQueryParser implements QueryParser { - - public static final String NAME = "dis_max"; - - @Inject - public DisMaxQueryParser() { - } +public class DisMaxQueryParser implements QueryParser { @Override public String[] names() { - return new String[]{NAME, Strings.toCamelCase(NAME)}; + return new String[]{DisMaxQueryBuilder.NAME, Strings.toCamelCase(DisMaxQueryBuilder.NAME)}; } @Override - public Query parse(QueryParseContext parseContext) throws IOException, ParsingException { + public DisMaxQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException { XContentParser parser = parseContext.parser(); - float boost = 1.0f; - float tieBreaker = 0.0f; + float boost = AbstractQueryBuilder.DEFAULT_BOOST; + float tieBreaker = DisMaxQueryBuilder.DEFAULT_TIE_BREAKER; - List queries = new ArrayList<>(); + final List queries = new ArrayList<>(); boolean queriesFound = false; String queryName = null; @@ -65,25 +56,21 @@ public class DisMaxQueryParser implements QueryParser { } else if (token == XContentParser.Token.START_OBJECT) { if ("queries".equals(currentFieldName)) { queriesFound = true; - Query query = 
parseContext.parseInnerQuery(); - if (query != null) { - queries.add(query); - } + QueryBuilder query = parseContext.parseInnerQueryBuilder(); + queries.add(query); } else { - throw new ParsingException(parseContext, "[dis_max] query does not support [" + currentFieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), "[dis_max] query does not support [" + currentFieldName + "]"); } } else if (token == XContentParser.Token.START_ARRAY) { if ("queries".equals(currentFieldName)) { queriesFound = true; while (token != XContentParser.Token.END_ARRAY) { - Query query = parseContext.parseInnerQuery(); - if (query != null) { - queries.add(query); - } + QueryBuilder query = parseContext.parseInnerQueryBuilder(); + queries.add(query); token = parser.nextToken(); } } else { - throw new ParsingException(parseContext, "[dis_max] query does not support [" + currentFieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), "[dis_max] query does not support [" + currentFieldName + "]"); } } else { if ("boost".equals(currentFieldName)) { @@ -93,24 +80,27 @@ public class DisMaxQueryParser implements QueryParser { } else if ("_name".equals(currentFieldName)) { queryName = parser.text(); } else { - throw new ParsingException(parseContext, "[dis_max] query does not support [" + currentFieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), "[dis_max] query does not support [" + currentFieldName + "]"); } } } if (!queriesFound) { - throw new ParsingException(parseContext, "[dis_max] requires 'queries' field"); + throw new ParsingException(parser.getTokenLocation(), "[dis_max] requires 'queries' field"); } - if (queries.isEmpty()) { - return null; + DisMaxQueryBuilder disMaxQuery = new DisMaxQueryBuilder(); + disMaxQuery.tieBreaker(tieBreaker); + disMaxQuery.queryName(queryName); + disMaxQuery.boost(boost); + for (QueryBuilder query : queries) { + disMaxQuery.add(query); } - - DisjunctionMaxQuery query = new DisjunctionMaxQuery(queries, 
tieBreaker); - query.setBoost(boost); - if (queryName != null) { - parseContext.addNamedQuery(queryName, query); - } - return query; + return disMaxQuery; } -} \ No newline at end of file + + @Override + public DisMaxQueryBuilder getBuilderPrototype() { + return DisMaxQueryBuilder.PROTOTYPE; + } +} diff --git a/core/src/main/java/org/elasticsearch/index/query/EmptyQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/EmptyQueryBuilder.java new file mode 100644 index 00000000000..7d1761a44b2 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/index/query/EmptyQueryBuilder.java @@ -0,0 +1,111 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.query; + +import org.apache.lucene.search.Query; +import org.elasticsearch.action.support.ToXContentToBytes; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentType; + +import java.io.IOException; + +/** + * A {@link QueryBuilder} that is a stand in replacement for an empty query clause in the DSL. 
+ * The current DSL allows parsing inner queries / filters like "{ }", in order to have a + * valid non-null representation of these clauses that actually do nothing we can use this class. + * + * This builder has no corresponding parser and it is not registered under the query name. It is + * intended to be used internally as a stand-in for nested queries that are left empty and should + * be ignored upstream. + */ +public class EmptyQueryBuilder extends ToXContentToBytes implements QueryBuilder { + + public static final String NAME = "empty_query"; + + /** the one and only empty query builder */ + public static final EmptyQueryBuilder PROTOTYPE = new EmptyQueryBuilder(); + + // prevent instances other than prototype + private EmptyQueryBuilder() { + super(XContentType.JSON); + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public String getName() { + return getWriteableName(); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.endObject(); + return builder; + } + + @Override + public Query toQuery(QueryShardContext context) throws IOException { + // empty + return null; + } + + @Override + public Query toFilter(QueryShardContext context) throws IOException { + // empty + return null; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + } + + @Override + public EmptyQueryBuilder readFrom(StreamInput in) throws IOException { + return EmptyQueryBuilder.PROTOTYPE; + } + + @Override + public EmptyQueryBuilder queryName(String queryName) { + //no-op + return this; + } + + @Override + public String queryName() { + return null; + } + + @Override + public float boost() { + return -1; + } + + @Override + public EmptyQueryBuilder boost(float boost) { + //no-op + return this; + } +} diff --git a/core/src/main/java/org/elasticsearch/index/query/ExistsQueryBuilder.java 
b/core/src/main/java/org/elasticsearch/index/query/ExistsQueryBuilder.java index 9980d819ea4..89a738ebac0 100644 --- a/core/src/main/java/org/elasticsearch/index/query/ExistsQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/ExistsQueryBuilder.java @@ -19,38 +19,124 @@ package org.elasticsearch.index.query; +import org.apache.lucene.search.*; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.internal.FieldNamesFieldMapper; +import org.elasticsearch.index.mapper.object.ObjectMapper; import java.io.IOException; +import java.util.Collection; +import java.util.Objects; /** * Constructs a query that only match on documents that the field has a value in them. */ -public class ExistsQueryBuilder extends QueryBuilder { +public class ExistsQueryBuilder extends AbstractQueryBuilder { - private String name; + public static final String NAME = "exists"; - private String queryName; + private final String fieldName; - public ExistsQueryBuilder(String name) { - this.name = name; + static final ExistsQueryBuilder PROTOTYPE = new ExistsQueryBuilder("field"); + + public ExistsQueryBuilder(String fieldName) { + if (Strings.isEmpty(fieldName)) { + throw new IllegalArgumentException("field name is null or empty"); + } + this.fieldName = fieldName; } /** - * Sets the query name for the query that can be used when searching for matched_queries per hit. 
+ * @return the field name that has to exist for this query to match */ - public ExistsQueryBuilder queryName(String queryName) { - this.queryName = queryName; - return this; + public String fieldName() { + return this.fieldName; } @Override protected void doXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(ExistsQueryParser.NAME); - builder.field("field", name); - if (queryName != null) { - builder.field("_name", queryName); - } + builder.startObject(NAME); + builder.field("field", fieldName); + printBoostAndQueryName(builder); builder.endObject(); } + + @Override + protected Query doToQuery(QueryShardContext context) throws IOException { + return newFilter(context, fieldName); + } + + public static Query newFilter(QueryShardContext context, String fieldPattern) { + final FieldNamesFieldMapper.FieldNamesFieldType fieldNamesFieldType = (FieldNamesFieldMapper.FieldNamesFieldType)context.mapperService().fullName(FieldNamesFieldMapper.NAME); + if (fieldNamesFieldType == null) { + // can only happen when no types exist, so no docs exist either + return Queries.newMatchNoDocsQuery(); + } + + ObjectMapper objectMapper = context.getObjectMapper(fieldPattern); + if (objectMapper != null) { + // automatic make the object mapper pattern + fieldPattern = fieldPattern + ".*"; + } + + Collection fields = context.simpleMatchToIndexNames(fieldPattern); + if (fields.isEmpty()) { + // no fields exists, so we should not match anything + return Queries.newMatchNoDocsQuery(); + } + + BooleanQuery.Builder boolFilterBuilder = new BooleanQuery.Builder(); + for (String field : fields) { + MappedFieldType fieldType = context.fieldMapper(field); + Query filter = null; + if (fieldNamesFieldType.isEnabled()) { + final String f; + if (fieldType != null) { + f = fieldType.names().indexName(); + } else { + f = field; + } + filter = fieldNamesFieldType.termQuery(f, context); + } + // if _field_names are not indexed, we need to go the slow way + if (filter 
== null && fieldType != null) { + filter = fieldType.rangeQuery(null, null, true, true); + } + if (filter == null) { + filter = new TermRangeQuery(field, null, null, true, true); + } + boolFilterBuilder.add(filter, BooleanClause.Occur.SHOULD); + } + return new ConstantScoreQuery(boolFilterBuilder.build()); + } + + @Override + protected int doHashCode() { + return Objects.hash(fieldName); + } + + @Override + protected boolean doEquals(ExistsQueryBuilder other) { + return Objects.equals(fieldName, other.fieldName); + } + + @Override + protected ExistsQueryBuilder doReadFrom(StreamInput in) throws IOException { + return new ExistsQueryBuilder(in.readString()); + } + + @Override + protected void doWriteTo(StreamOutput out) throws IOException { + out.writeString(fieldName); + } + + @Override + public String getWriteableName() { + return NAME; + } } diff --git a/core/src/main/java/org/elasticsearch/index/query/ExistsQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/ExistsQueryParser.java index de2cbe77e37..f0df84f9aa8 100644 --- a/core/src/main/java/org/elasticsearch/index/query/ExistsQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/ExistsQueryParser.java @@ -19,40 +19,28 @@ package org.elasticsearch.index.query; -import org.apache.lucene.search.*; import org.elasticsearch.common.ParsingException; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.mapper.internal.FieldNamesFieldMapper; -import org.elasticsearch.index.mapper.object.ObjectMapper; import java.io.IOException; -import java.util.Collection; /** - * + * Parser for exists query */ -public class ExistsQueryParser implements QueryParser { - - public static final String NAME = "exists"; - - @Inject - public ExistsQueryParser() { - } +public class ExistsQueryParser implements 
QueryParser { @Override public String[] names() { - return new String[]{NAME}; + return new String[]{ExistsQueryBuilder.NAME}; } @Override - public Query parse(QueryParseContext parseContext) throws IOException, ParsingException { + public ExistsQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException { XContentParser parser = parseContext.parser(); String fieldPattern = null; String queryName = null; + float boost = AbstractQueryBuilder.DEFAULT_BOOST; XContentParser.Token token; String currentFieldName = null; @@ -64,66 +52,26 @@ public class ExistsQueryParser implements QueryParser { fieldPattern = parser.text(); } else if ("_name".equals(currentFieldName)) { queryName = parser.text(); + } else if ("boost".equals(currentFieldName)) { + boost = parser.floatValue(); } else { - throw new ParsingException(parseContext, "[exists] query does not support [" + currentFieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), "[exists] query does not support [" + currentFieldName + "]"); } } } if (fieldPattern == null) { - throw new ParsingException(parseContext, "exists must be provided with a [field]"); + throw new ParsingException(parser.getTokenLocation(), "exists must be provided with a [field]"); } - return newFilter(parseContext, fieldPattern, queryName); + ExistsQueryBuilder builder = new ExistsQueryBuilder(fieldPattern); + builder.queryName(queryName); + builder.boost(boost); + return builder; } - public static Query newFilter(QueryParseContext parseContext, String fieldPattern, String queryName) { - final FieldNamesFieldMapper.FieldNamesFieldType fieldNamesFieldType = (FieldNamesFieldMapper.FieldNamesFieldType)parseContext.mapperService().fullName(FieldNamesFieldMapper.NAME); - if (fieldNamesFieldType == null) { - // can only happen when no types exist, so no docs exist either - return Queries.newMatchNoDocsQuery(); - } - - ObjectMapper objectMapper = parseContext.getObjectMapper(fieldPattern); - if (objectMapper != null) { - 
// automatic make the object mapper pattern - fieldPattern = fieldPattern + ".*"; - } - - Collection fields = parseContext.simpleMatchToIndexNames(fieldPattern); - if (fields.isEmpty()) { - // no fields exists, so we should not match anything - return Queries.newMatchNoDocsQuery(); - } - - BooleanQuery.Builder boolFilterBuilder = new BooleanQuery.Builder(); - for (String field : fields) { - MappedFieldType fieldType = parseContext.fieldMapper(field); - Query filter = null; - if (fieldNamesFieldType.isEnabled()) { - final String f; - if (fieldType != null) { - f = fieldType.names().indexName(); - } else { - f = field; - } - filter = fieldNamesFieldType.termQuery(f, parseContext); - } - // if _field_names are not indexed, we need to go the slow way - if (filter == null && fieldType != null) { - filter = fieldType.rangeQuery(null, null, true, true); - } - if (filter == null) { - filter = new TermRangeQuery(field, null, null, true, true); - } - boolFilterBuilder.add(filter, BooleanClause.Occur.SHOULD); - } - - BooleanQuery boolFilter = boolFilterBuilder.build(); - if (queryName != null) { - parseContext.addNamedQuery(queryName, boolFilter); - } - return new ConstantScoreQuery(boolFilter); + @Override + public ExistsQueryBuilder getBuilderPrototype() { + return ExistsQueryBuilder.PROTOTYPE; } - } diff --git a/core/src/main/java/org/elasticsearch/index/query/FieldMaskingSpanQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/FieldMaskingSpanQueryBuilder.java index c118416bfd0..671cfda3e7a 100644 --- a/core/src/main/java/org/elasticsearch/index/query/FieldMaskingSpanQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/FieldMaskingSpanQueryBuilder.java @@ -19,52 +19,106 @@ package org.elasticsearch.index.query; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.spans.FieldMaskingSpanQuery; +import org.apache.lucene.search.spans.SpanQuery; +import org.elasticsearch.common.Strings; +import 
org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.mapper.MappedFieldType; import java.io.IOException; +import java.util.Objects; -public class FieldMaskingSpanQueryBuilder extends SpanQueryBuilder implements BoostableQueryBuilder { +public class FieldMaskingSpanQueryBuilder extends AbstractQueryBuilder implements SpanQueryBuilder{ + + public static final String NAME = "field_masking_span"; private final SpanQueryBuilder queryBuilder; - private final String field; + private final String fieldName; - private float boost = -1; + static final FieldMaskingSpanQueryBuilder PROTOTYPE = new FieldMaskingSpanQueryBuilder(new SpanTermQueryBuilder("field", "text"), "field"); - private String queryName; - - - public FieldMaskingSpanQueryBuilder(SpanQueryBuilder queryBuilder, String field) { + /** + * Constructs a new {@link FieldMaskingSpanQueryBuilder} given an inner {@link SpanQueryBuilder} for + * a given field + * @param queryBuilder inner {@link SpanQueryBuilder} + * @param fieldName the field name + */ + public FieldMaskingSpanQueryBuilder(SpanQueryBuilder queryBuilder, String fieldName) { + if (Strings.isEmpty(fieldName)) { + throw new IllegalArgumentException("field name is null or empty"); + } + if (queryBuilder == null) { + throw new IllegalArgumentException("inner clause [query] cannot be null."); + } this.queryBuilder = queryBuilder; - this.field = field; - } - - @Override - public FieldMaskingSpanQueryBuilder boost(float boost) { - this.boost = boost; - return this; + this.fieldName = fieldName; } /** - * Sets the query name for the filter that can be used when searching for matched_filters per hit. 
+ * @return the field name for this query */ - public FieldMaskingSpanQueryBuilder queryName(String queryName) { - this.queryName = queryName; - return this; + public String fieldName() { + return this.fieldName; + } + + /** + * @return the inner {@link QueryBuilder} + */ + public SpanQueryBuilder innerQuery() { + return this.queryBuilder; } @Override protected void doXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(FieldMaskingSpanQueryParser.NAME); + builder.startObject(NAME); builder.field("query"); queryBuilder.toXContent(builder, params); - builder.field("field", field); - if (boost != -1) { - builder.field("boost", boost); - } - if (queryName != null) { - builder.field("_name", queryName); - } + builder.field("field", fieldName); + printBoostAndQueryName(builder); builder.endObject(); } + + @Override + protected SpanQuery doToQuery(QueryShardContext context) throws IOException { + String fieldInQuery = fieldName; + MappedFieldType fieldType = context.fieldMapper(fieldName); + if (fieldType != null) { + fieldInQuery = fieldType.names().indexName(); + } + Query innerQuery = queryBuilder.toQuery(context); + assert innerQuery instanceof SpanQuery; + return new FieldMaskingSpanQuery((SpanQuery)innerQuery, fieldInQuery); + } + + @Override + protected FieldMaskingSpanQueryBuilder doReadFrom(StreamInput in) throws IOException { + QueryBuilder innerQueryBuilder = in.readQuery(); + return new FieldMaskingSpanQueryBuilder((SpanQueryBuilder) innerQueryBuilder, in.readString()); + } + + @Override + protected void doWriteTo(StreamOutput out) throws IOException { + out.writeQuery(queryBuilder); + out.writeString(fieldName); + } + + @Override + protected int doHashCode() { + return Objects.hash(queryBuilder, fieldName); + } + + @Override + protected boolean doEquals(FieldMaskingSpanQueryBuilder other) { + return Objects.equals(queryBuilder, other.queryBuilder) && + Objects.equals(fieldName, other.fieldName); + } + + @Override + 
public String getWriteableName() { + return NAME; + } } diff --git a/core/src/main/java/org/elasticsearch/index/query/FieldMaskingSpanQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/FieldMaskingSpanQueryParser.java index 9a8579118fd..8ce123b78cf 100644 --- a/core/src/main/java/org/elasticsearch/index/query/FieldMaskingSpanQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/FieldMaskingSpanQueryParser.java @@ -19,40 +19,28 @@ package org.elasticsearch.index.query; -import org.apache.lucene.search.Query; -import org.apache.lucene.search.spans.FieldMaskingSpanQuery; -import org.apache.lucene.search.spans.SpanQuery; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.mapper.MappedFieldType; - import java.io.IOException; /** - * + * Parser for field_masking_span query */ -public class FieldMaskingSpanQueryParser implements QueryParser { - - public static final String NAME = "field_masking_span"; - - @Inject - public FieldMaskingSpanQueryParser() { - } +public class FieldMaskingSpanQueryParser implements QueryParser { @Override public String[] names() { - return new String[]{NAME, Strings.toCamelCase(NAME)}; + return new String[]{FieldMaskingSpanQueryBuilder.NAME, Strings.toCamelCase(FieldMaskingSpanQueryBuilder.NAME)}; } @Override - public Query parse(QueryParseContext parseContext) throws IOException, ParsingException { + public FieldMaskingSpanQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException { XContentParser parser = parseContext.parser(); - float boost = 1.0f; + float boost = AbstractQueryBuilder.DEFAULT_BOOST; - SpanQuery inner = null; + SpanQueryBuilder inner = null; String field = null; String queryName = null; @@ -63,13 +51,13 @@ public class FieldMaskingSpanQueryParser implements QueryParser { currentFieldName = 
parser.currentName(); } else if (token == XContentParser.Token.START_OBJECT) { if ("query".equals(currentFieldName)) { - Query query = parseContext.parseInnerQuery(); - if (!(query instanceof SpanQuery)) { - throw new ParsingException(parseContext, "[field_masking_span] query] must be of type span query"); + QueryBuilder query = parseContext.parseInnerQueryBuilder(); + if (!(query instanceof SpanQueryBuilder)) { + throw new ParsingException(parser.getTokenLocation(), "[field_masking_span] query must be of type span query"); } - inner = (SpanQuery) query; + inner = (SpanQueryBuilder) query; } else { - throw new ParsingException(parseContext, "[field_masking_span] query does not support [" + throw new ParsingException(parser.getTokenLocation(), "[field_masking_span] query does not support [" + currentFieldName + "]"); } } else { @@ -80,27 +68,25 @@ public class FieldMaskingSpanQueryParser implements QueryParser { } else if ("_name".equals(currentFieldName)) { queryName = parser.text(); } else { - throw new ParsingException(parseContext, "[field_masking_span] query does not support [" + currentFieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), "[field_masking_span] query does not support [" + currentFieldName + "]"); } } } if (inner == null) { - throw new ParsingException(parseContext, "field_masking_span must have [query] span query clause"); + throw new ParsingException(parser.getTokenLocation(), "field_masking_span must have [query] span query clause"); } if (field == null) { - throw new ParsingException(parseContext, "field_masking_span must have [field] set for it"); + throw new ParsingException(parser.getTokenLocation(), "field_masking_span must have [field] set for it"); } - MappedFieldType fieldType = parseContext.fieldMapper(field); - if (fieldType != null) { - field = fieldType.names().indexName(); - } - - FieldMaskingSpanQuery query = new FieldMaskingSpanQuery(inner, field); - query.setBoost(boost); - if (queryName != null) { - 
parseContext.addNamedQuery(queryName, query); - } - return query; + FieldMaskingSpanQueryBuilder queryBuilder = new FieldMaskingSpanQueryBuilder(inner, field); + queryBuilder.boost(boost); + queryBuilder.queryName(queryName); + return queryBuilder; } -} \ No newline at end of file + + @Override + public FieldMaskingSpanQueryBuilder getBuilderPrototype() { + return FieldMaskingSpanQueryBuilder.PROTOTYPE; + } +} diff --git a/core/src/main/java/org/elasticsearch/index/query/FuzzyQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/FuzzyQueryBuilder.java index 23557b1a4cf..7820f1b5a1b 100644 --- a/core/src/main/java/org/elasticsearch/index/query/FuzzyQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/FuzzyQueryBuilder.java @@ -19,177 +19,273 @@ package org.elasticsearch.index.query; +import org.apache.lucene.index.Term; +import org.apache.lucene.search.FuzzyQuery; +import org.apache.lucene.search.MultiTermQuery; +import org.apache.lucene.search.Query; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.query.support.QueryParsers; import java.io.IOException; +import java.util.Objects; /** * A Query that does fuzzy matching for a specific value. */ -public class FuzzyQueryBuilder extends MultiTermQueryBuilder implements BoostableQueryBuilder { +public class FuzzyQueryBuilder extends AbstractQueryBuilder implements MultiTermQueryBuilder { - private final String name; + public static final String NAME = "fuzzy"; + + /** Default maximum edit distance. Defaults to AUTO. 
*/ + public static final Fuzziness DEFAULT_FUZZINESS = Fuzziness.AUTO; + + /** Default number of initial characters which will not be “fuzzified”. Defaults to 0. */ + public static final int DEFAULT_PREFIX_LENGTH = FuzzyQuery.defaultPrefixLength; + + /** Default maximum number of terms that the fuzzy query will expand to. Defaults to 50. */ + public static final int DEFAULT_MAX_EXPANSIONS = FuzzyQuery.defaultMaxExpansions; + + /** Default as to whether transpositions should be treated as a primitive edit operation, + * instead of classic Levenshtein algorithm. Defaults to false. */ + public static final boolean DEFAULT_TRANSPOSITIONS = false; + + private final String fieldName; private final Object value; - private float boost = -1; + private Fuzziness fuzziness = DEFAULT_FUZZINESS; - private Fuzziness fuzziness; + private int prefixLength = DEFAULT_PREFIX_LENGTH; - private Integer prefixLength; + private int maxExpansions = DEFAULT_MAX_EXPANSIONS; - private Integer maxExpansions; - //LUCENE 4 UPGRADE we need a testcase for this + documentation - private Boolean transpositions; + private boolean transpositions = DEFAULT_TRANSPOSITIONS; private String rewrite; - private String queryName; + static final FuzzyQueryBuilder PROTOTYPE = new FuzzyQueryBuilder(); /** * Constructs a new fuzzy query. * - * @param name The name of the field + * @param fieldName The name of the field * @param value The value of the text */ - public FuzzyQueryBuilder(String name, Object value) { - this.name = name; - this.value = value; + public FuzzyQueryBuilder(String fieldName, String value) { + this(fieldName, (Object) value); } /** * Constructs a new fuzzy query. 
* - * @param name The name of the field + * @param fieldName The name of the field * @param value The value of the text */ - public FuzzyQueryBuilder(String name, String value) { - this(name, (Object) value); + public FuzzyQueryBuilder(String fieldName, int value) { + this(fieldName, (Object) value); } /** * Constructs a new fuzzy query. * - * @param name The name of the field + * @param fieldName The name of the field * @param value The value of the text */ - public FuzzyQueryBuilder(String name, int value) { - this(name, (Object) value); + public FuzzyQueryBuilder(String fieldName, long value) { + this(fieldName, (Object) value); } /** * Constructs a new fuzzy query. * - * @param name The name of the field + * @param fieldName The name of the field * @param value The value of the text */ - public FuzzyQueryBuilder(String name, long value) { - this(name, (Object) value); + public FuzzyQueryBuilder(String fieldName, float value) { + this(fieldName, (Object) value); } /** * Constructs a new fuzzy query. * - * @param name The name of the field + * @param fieldName The name of the field * @param value The value of the text */ - public FuzzyQueryBuilder(String name, float value) { - this(name, (Object) value); + public FuzzyQueryBuilder(String fieldName, double value) { + this(fieldName, (Object) value); } /** * Constructs a new fuzzy query. * - * @param name The name of the field + * @param fieldName The name of the field * @param value The value of the text */ - public FuzzyQueryBuilder(String name, double value) { - this(name, (Object) value); + public FuzzyQueryBuilder(String fieldName, boolean value) { + this(fieldName, (Object) value); } - // NO COMMIT: not sure we should also allow boolean? /** * Constructs a new fuzzy query. 
* - * @param name The name of the field - * @param value The value of the text + * @param fieldName The name of the field + * @param value The value of the term */ - public FuzzyQueryBuilder(String name, boolean value) { - this(name, (Object) value); + public FuzzyQueryBuilder(String fieldName, Object value) { + if (Strings.isEmpty(fieldName)) { + throw new IllegalArgumentException("field name cannot be null or empty."); + } + if (value == null) { + throw new IllegalArgumentException("query value cannot be null"); + } + this.fieldName = fieldName; + this.value = convertToBytesRefIfString(value); } - /** - * Sets the boost for this query. Documents matching this query will (in addition to the normal - * weightings) have their score multiplied by the boost provided. - */ - @Override - public FuzzyQueryBuilder boost(float boost) { - this.boost = boost; - return this; + private FuzzyQueryBuilder() { + // for protoype + this.fieldName = null; + this.value = null; + } + + public String fieldName() { + return this.fieldName; + } + + public Object value() { + return convertToStringIfBytesRef(this.value); } public FuzzyQueryBuilder fuzziness(Fuzziness fuzziness) { - this.fuzziness = fuzziness; + this.fuzziness = (fuzziness == null) ? 
DEFAULT_FUZZINESS : fuzziness; return this; } + public Fuzziness fuzziness() { + return this.fuzziness; + } + public FuzzyQueryBuilder prefixLength(int prefixLength) { this.prefixLength = prefixLength; return this; } + public int prefixLength() { + return this.prefixLength; + } + public FuzzyQueryBuilder maxExpansions(int maxExpansions) { this.maxExpansions = maxExpansions; return this; } - + + public int maxExpansions() { + return this.maxExpansions; + } + public FuzzyQueryBuilder transpositions(boolean transpositions) { this.transpositions = transpositions; return this; } + public boolean transpositions() { + return this.transpositions; + } + public FuzzyQueryBuilder rewrite(String rewrite) { this.rewrite = rewrite; return this; } - /** - * Sets the query name for the filter that can be used when searching for matched_filters per hit. - */ - public FuzzyQueryBuilder queryName(String queryName) { - this.queryName = queryName; - return this; + public String rewrite() { + return this.rewrite; } @Override public void doXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(FuzzyQueryParser.NAME); - builder.startObject(name); - builder.field("value", value); - if (boost != -1) { - builder.field("boost", boost); - } - if (transpositions != null) { - builder.field("transpositions", transpositions); - } - if (fuzziness != null) { - fuzziness.toXContent(builder, params); - } - if (prefixLength != null) { - builder.field("prefix_length", prefixLength); - } - if (maxExpansions != null) { - builder.field("max_expansions", maxExpansions); - } + builder.startObject(NAME); + builder.startObject(fieldName); + builder.field("value", convertToStringIfBytesRef(this.value)); + fuzziness.toXContent(builder, params); + builder.field("prefix_length", prefixLength); + builder.field("max_expansions", maxExpansions); + builder.field("transpositions", transpositions); if (rewrite != null) { builder.field("rewrite", rewrite); } - if (queryName != null) { 
- builder.field("_name", queryName); - } + printBoostAndQueryName(builder); builder.endObject(); builder.endObject(); } -} \ No newline at end of file + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public Query doToQuery(QueryShardContext context) throws IOException { + Query query = null; + if (rewrite == null && context.isFilter()) { + rewrite = QueryParsers.CONSTANT_SCORE.getPreferredName(); + } + MappedFieldType fieldType = context.fieldMapper(fieldName); + if (fieldType != null) { + query = fieldType.fuzzyQuery(value, fuzziness, prefixLength, maxExpansions, transpositions); + } + if (query == null) { + int maxEdits = fuzziness.asDistance(BytesRefs.toString(value)); + query = new FuzzyQuery(new Term(fieldName, BytesRefs.toBytesRef(value)), maxEdits, prefixLength, maxExpansions, transpositions); + } + if (query instanceof MultiTermQuery) { + MultiTermQuery.RewriteMethod rewriteMethod = QueryParsers.parseRewriteMethod(context.parseFieldMatcher(), rewrite, null); + QueryParsers.setRewriteMethod((MultiTermQuery) query, rewriteMethod); + } + return query; + } + + @Override + public FuzzyQueryBuilder doReadFrom(StreamInput in) throws IOException { + FuzzyQueryBuilder fuzzyQueryBuilder = new FuzzyQueryBuilder(in.readString(), in.readGenericValue()); + fuzzyQueryBuilder.fuzziness = Fuzziness.readFuzzinessFrom(in); + fuzzyQueryBuilder.prefixLength = in.readVInt(); + fuzzyQueryBuilder.maxExpansions = in.readVInt(); + fuzzyQueryBuilder.transpositions = in.readBoolean(); + fuzzyQueryBuilder.rewrite = in.readOptionalString(); + return fuzzyQueryBuilder; + } + + @Override + public void doWriteTo(StreamOutput out) throws IOException { + out.writeString(this.fieldName); + out.writeGenericValue(this.value); + this.fuzziness.writeTo(out); + out.writeVInt(this.prefixLength); + out.writeVInt(this.maxExpansions); + out.writeBoolean(this.transpositions); + out.writeOptionalString(this.rewrite); + } + + @Override + public int doHashCode() { + 
return Objects.hash(fieldName, value, fuzziness, prefixLength, maxExpansions, transpositions, rewrite); + } + + @Override + public boolean doEquals(FuzzyQueryBuilder other) { + return Objects.equals(fieldName, other.fieldName) && + Objects.equals(value, other.value) && + Objects.equals(fuzziness, other.fuzziness) && + Objects.equals(prefixLength, other.prefixLength) && + Objects.equals(maxExpansions, other.maxExpansions) && + Objects.equals(transpositions, other.transpositions) && + Objects.equals(rewrite, other.rewrite); + } +} diff --git a/core/src/main/java/org/elasticsearch/index/query/FuzzyQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/FuzzyQueryParser.java index c2013eade4f..340094af7ce 100644 --- a/core/src/main/java/org/elasticsearch/index/query/FuzzyQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/FuzzyQueryParser.java @@ -19,61 +19,42 @@ package org.elasticsearch.index.query; -import org.apache.lucene.index.Term; -import org.apache.lucene.search.FuzzyQuery; -import org.apache.lucene.search.MultiTermQuery; -import org.apache.lucene.search.Query; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.query.support.QueryParsers; - import java.io.IOException; -/** - * - */ -public class FuzzyQueryParser implements QueryParser { +public class FuzzyQueryParser implements QueryParser { - public static final String NAME = "fuzzy"; - private static final Fuzziness DEFAULT_FUZZINESS = Fuzziness.AUTO; private static final ParseField FUZZINESS = Fuzziness.FIELD.withDeprecation("min_similarity"); - - @Inject - public FuzzyQueryParser() { - } - @Override public String[] names() { - return new 
String[]{NAME}; + return new String[]{ FuzzyQueryBuilder.NAME }; } @Override - public Query parse(QueryParseContext parseContext) throws IOException, ParsingException { + public FuzzyQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException { XContentParser parser = parseContext.parser(); XContentParser.Token token = parser.nextToken(); if (token != XContentParser.Token.FIELD_NAME) { - throw new ParsingException(parseContext, "[fuzzy] query malformed, no field"); + throw new ParsingException(parser.getTokenLocation(), "[fuzzy] query malformed, no field"); } - String fieldName = parser.currentName(); + String fieldName = parser.currentName(); Object value = null; - float boost = 1.0f; - Fuzziness fuzziness = DEFAULT_FUZZINESS; - int prefixLength = FuzzyQuery.defaultPrefixLength; - int maxExpansions = FuzzyQuery.defaultMaxExpansions; - boolean transpositions = FuzzyQuery.defaultTranspositions; + + Fuzziness fuzziness = FuzzyQueryBuilder.DEFAULT_FUZZINESS; + int prefixLength = FuzzyQueryBuilder.DEFAULT_PREFIX_LENGTH; + int maxExpansions = FuzzyQueryBuilder.DEFAULT_MAX_EXPANSIONS; + boolean transpositions = FuzzyQueryBuilder.DEFAULT_TRANSPOSITIONS; + String rewrite = null; + String queryName = null; - MultiTermQuery.RewriteMethod rewriteMethod = null; - if (parseContext.isFilter()) { - rewriteMethod = MultiTermQuery.CONSTANT_SCORE_REWRITE; - } + float boost = AbstractQueryBuilder.DEFAULT_BOOST; + token = parser.nextToken(); if (token == XContentParser.Token.START_OBJECT) { String currentFieldName = null; @@ -94,13 +75,13 @@ public class FuzzyQueryParser implements QueryParser { } else if ("max_expansions".equals(currentFieldName) || "maxExpansions".equals(currentFieldName)) { maxExpansions = parser.intValue(); } else if ("transpositions".equals(currentFieldName)) { - transpositions = parser.booleanValue(); + transpositions = parser.booleanValue(); } else if ("rewrite".equals(currentFieldName)) { - rewriteMethod = 
QueryParsers.parseRewriteMethod(parseContext.parseFieldMatcher(), parser.textOrNull(), null); + rewrite = parser.textOrNull(); } else if ("_name".equals(currentFieldName)) { queryName = parser.text(); } else { - throw new ParsingException(parseContext, "[fuzzy] query does not support [" + currentFieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), "[fuzzy] query does not support [" + currentFieldName + "]"); } } } @@ -112,26 +93,20 @@ public class FuzzyQueryParser implements QueryParser { } if (value == null) { - throw new ParsingException(parseContext, "No value specified for fuzzy query"); + throw new ParsingException(parser.getTokenLocation(), "no value specified for fuzzy query"); } - - Query query = null; - MappedFieldType fieldType = parseContext.fieldMapper(fieldName); - if (fieldType != null) { - query = fieldType.fuzzyQuery(value, fuzziness, prefixLength, maxExpansions, transpositions); - } - if (query == null) { - int maxEdits = fuzziness.asDistance(BytesRefs.toString(value)); - query = new FuzzyQuery(new Term(fieldName, BytesRefs.toBytesRef(value)), maxEdits, prefixLength, maxExpansions, transpositions); - } - if (query instanceof MultiTermQuery) { - QueryParsers.setRewriteMethod((MultiTermQuery) query, rewriteMethod); - } - query.setBoost(boost); + return new FuzzyQueryBuilder(fieldName, value) + .fuzziness(fuzziness) + .prefixLength(prefixLength) + .maxExpansions(maxExpansions) + .transpositions(transpositions) + .rewrite(rewrite) + .boost(boost) + .queryName(queryName); + } - if (queryName != null) { - parseContext.addNamedQuery(queryName, query); - } - return query; + @Override + public FuzzyQueryBuilder getBuilderPrototype() { + return FuzzyQueryBuilder.PROTOTYPE; } } diff --git a/core/src/main/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilder.java index 0f08a7cc3d0..4ad63ebc1fc 100644 --- 
a/core/src/main/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilder.java @@ -19,174 +19,319 @@ package org.elasticsearch.index.query; +import org.apache.lucene.search.Query; +import org.elasticsearch.Version; +import org.elasticsearch.common.Numbers; import org.elasticsearch.common.geo.GeoPoint; +import org.elasticsearch.common.geo.GeoUtils; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.fielddata.IndexGeoPointFieldData; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper; +import org.elasticsearch.index.search.geo.InMemoryGeoBoundingBoxQuery; +import org.elasticsearch.index.search.geo.IndexedGeoBoundingBoxQuery; import java.io.IOException; +import java.util.Objects; -public class GeoBoundingBoxQueryBuilder extends QueryBuilder { +/** + * Creates a Lucene query that will filter for all documents that lie within the specified + * bounding box. + * + * This query can only operate on fields of type geo_point that have latitude and longitude + * enabled. + * */ +public class GeoBoundingBoxQueryBuilder extends AbstractQueryBuilder { + /** Name of the query. */ + public static final String NAME = "geo_bbox"; + /** Default type for executing this query (memory as of this writing). */ + public static final GeoExecType DEFAULT_TYPE = GeoExecType.MEMORY; + /** Needed for serialization. 
*/ + static final GeoBoundingBoxQueryBuilder PROTOTYPE = new GeoBoundingBoxQueryBuilder(""); - public static final String TOP_LEFT = GeoBoundingBoxQueryParser.TOP_LEFT; - public static final String BOTTOM_RIGHT = GeoBoundingBoxQueryParser.BOTTOM_RIGHT; + /** Name of field holding geo coordinates to compute the bounding box on.*/ + private final String fieldName; + /** Top left corner coordinates of bounding box. */ + private GeoPoint topLeft = new GeoPoint(Double.NaN, Double.NaN); + /** Bottom right corner coordinates of bounding box.*/ + private GeoPoint bottomRight = new GeoPoint(Double.NaN, Double.NaN); + /** How to deal with incorrect coordinates.*/ + private GeoValidationMethod validationMethod = GeoValidationMethod.DEFAULT; + /** How the query should be run. */ + private GeoExecType type = DEFAULT_TYPE; - private static final int TOP = 0; - private static final int LEFT = 1; - private static final int BOTTOM = 2; - private static final int RIGHT = 3; - - private final String name; - - private double[] box = {Double.NaN, Double.NaN, Double.NaN, Double.NaN}; - - private String queryName; - private String type; - private Boolean coerce; - private Boolean ignoreMalformed; - - public GeoBoundingBoxQueryBuilder(String name) { - this.name = name; + /** + * Create new bounding box query. + * @param fieldName name of index field containing geo coordinates to operate on. + * */ + public GeoBoundingBoxQueryBuilder(String fieldName) { + if (fieldName == null) { + throw new IllegalArgumentException("Field name must not be empty."); + } + this.fieldName = fieldName; } /** * Adds top left point. 
- * - * @param lat The latitude - * @param lon The longitude + * @param top The top latitude + * @param left The left longitude + * @param bottom The bottom latitude + * @param right The right longitude */ - public GeoBoundingBoxQueryBuilder topLeft(double lat, double lon) { - box[TOP] = lat; - box[LEFT] = lon; + public GeoBoundingBoxQueryBuilder setCorners(double top, double left, double bottom, double right) { + if (GeoValidationMethod.isIgnoreMalformed(validationMethod) == false) { + if (Numbers.isValidDouble(top) == false) { + throw new IllegalArgumentException("top latitude is invalid: " + top); + } + if (Numbers.isValidDouble(left) == false) { + throw new IllegalArgumentException("left longitude is invalid: " + left); + } + if (Numbers.isValidDouble(bottom) == false) { + throw new IllegalArgumentException("bottom latitude is invalid: " + bottom); + } + if (Numbers.isValidDouble(right) == false) { + throw new IllegalArgumentException("right longitude is invalid: " + right); + } + + // all corners are valid after above checks - make sure they are in the right relation + if (top < bottom) { + throw new IllegalArgumentException("top is below bottom corner: " + + top + " vs. " + bottom); + } + + // we do not check longitudes as the query generation code can deal with flipped left/right values + } + + topLeft.reset(top, left); + bottomRight.reset(bottom, right); return this; } - public GeoBoundingBoxQueryBuilder topLeft(GeoPoint point) { - return topLeft(point.lat(), point.lon()); - } - - public GeoBoundingBoxQueryBuilder topLeft(String geohash) { - return topLeft(GeoPoint.fromGeohash(geohash)); - } - /** - * Adds bottom right corner. 
- * - * @param lat The latitude - * @param lon The longitude - */ - public GeoBoundingBoxQueryBuilder bottomRight(double lat, double lon) { - box[BOTTOM] = lat; - box[RIGHT] = lon; - return this; - } - - public GeoBoundingBoxQueryBuilder bottomRight(GeoPoint point) { - return bottomRight(point.lat(), point.lon()); - } - - public GeoBoundingBoxQueryBuilder bottomRight(String geohash) { - return bottomRight(GeoPoint.fromGeohash(geohash)); + * Adds points. + * @param topLeft topLeft point to add. + * @param bottomRight bottomRight point to add. + * */ + public GeoBoundingBoxQueryBuilder setCorners(GeoPoint topLeft, GeoPoint bottomRight) { + return setCorners(topLeft.getLat(), topLeft.getLon(), bottomRight.getLat(), bottomRight.getLon()); } /** - * Adds bottom left corner. + * Adds points. + * @param topLeft topLeft point to add as geohash. + * @param bottomRight bottomRight point to add as geohash. + * */ + public GeoBoundingBoxQueryBuilder setCorners(String topLeft, String bottomRight) { + return setCorners(GeoPoint.fromGeohash(topLeft), GeoPoint.fromGeohash(bottomRight)); + } + + /** Returns the top left corner of the bounding box. */ + public GeoPoint topLeft() { + return topLeft; + } + + /** Returns the bottom right corner of the bounding box. */ + public GeoPoint bottomRight() { + return bottomRight; + } + + /** + * Adds corners in OGC standard bbox/ envelop format. * - * @param lat The latitude - * @param lon The longitude + * @param bottomLeft bottom left corner of bounding box. + * @param topRight top right corner of bounding box. */ - public GeoBoundingBoxQueryBuilder bottomLeft(double lat, double lon) { - box[BOTTOM] = lat; - box[LEFT] = lon; + public GeoBoundingBoxQueryBuilder setCornersOGC(GeoPoint bottomLeft, GeoPoint topRight) { + return setCorners(topRight.getLat(), bottomLeft.getLon(), bottomLeft.getLat(), topRight.getLon()); + } + + /** + * Adds corners in OGC standard bbox/ envelop format. + * + * @param bottomLeft bottom left corner geohash. 
+ * @param topRight top right corner geohash. + */ + public GeoBoundingBoxQueryBuilder setCornersOGC(String bottomLeft, String topRight) { + return setCornersOGC(GeoPoint.fromGeohash(bottomLeft), GeoPoint.fromGeohash(topRight)); + } + + /** + * Specify whether or not to ignore validation errors of bounding boxes. + * Can only be set if coerce set to false, otherwise calling this + * method has no effect. + **/ + public GeoBoundingBoxQueryBuilder setValidationMethod(GeoValidationMethod method) { + this.validationMethod = method; return this; } - - public GeoBoundingBoxQueryBuilder bottomLeft(GeoPoint point) { - return bottomLeft(point.lat(), point.lon()); - } - - public GeoBoundingBoxQueryBuilder bottomLeft(String geohash) { - return bottomLeft(GeoPoint.fromGeohash(geohash)); - } /** - * Adds top right point. - * - * @param lat The latitude - * @param lon The longitude - */ - public GeoBoundingBoxQueryBuilder topRight(double lat, double lon) { - box[TOP] = lat; - box[RIGHT] = lon; - return this; - } - - public GeoBoundingBoxQueryBuilder topRight(GeoPoint point) { - return topRight(point.lat(), point.lon()); - } - - public GeoBoundingBoxQueryBuilder topRight(String geohash) { - return topRight(GeoPoint.fromGeohash(geohash)); - } - - /** - * Sets the filter name for the filter that can be used when searching for matched_filters per hit. - */ - public GeoBoundingBoxQueryBuilder queryName(String queryName) { - this.queryName = queryName; - return this; - } - - public GeoBoundingBoxQueryBuilder coerce(boolean coerce) { - this.coerce = coerce; - return this; - } - - public GeoBoundingBoxQueryBuilder ignoreMalformed(boolean ignoreMalformed) { - this.ignoreMalformed = ignoreMalformed; - return this; + * Returns geo coordinate validation method to use. + * */ + public GeoValidationMethod getValidationMethod() { + return this.validationMethod; } /** * Sets the type of executing of the geo bounding box. Can be either `memory` or `indexed`. Defaults * to `memory`. 
*/ - public GeoBoundingBoxQueryBuilder type(String type) { + public GeoBoundingBoxQueryBuilder type(GeoExecType type) { + if (type == null) { + throw new IllegalArgumentException("Type is not allowed to be null."); + } this.type = type; return this; } + /** + * For BWC: Parse type from type name. + * */ + public GeoBoundingBoxQueryBuilder type(String type) { + this.type = GeoExecType.fromString(type); + return this; + } + /** Returns the execution type of the geo bounding box.*/ + public GeoExecType type() { + return type; + } + + /** Returns the name of the field to base the bounding box computation on. */ + public String fieldName() { + return this.fieldName; + } + + QueryValidationException checkLatLon(boolean indexCreatedBeforeV2_0) { + // validation was not available prior to 2.x, so to support bwc percolation queries we only ignore_malformed on 2.x created indexes + if (GeoValidationMethod.isIgnoreMalformed(validationMethod) == true || indexCreatedBeforeV2_0) { + return null; + } + + QueryValidationException validationException = null; + // For everything post 2.0 validate latitude and longitude unless validation was explicitly turned off + if (GeoUtils.isValidLatitude(topLeft.getLat()) == false) { + validationException = addValidationError("top latitude is invalid: " + topLeft.getLat(), + validationException); + } + if (GeoUtils.isValidLongitude(topLeft.getLon()) == false) { + validationException = addValidationError("left longitude is invalid: " + topLeft.getLon(), + validationException); + } + if (GeoUtils.isValidLatitude(bottomRight.getLat()) == false) { + validationException = addValidationError("bottom latitude is invalid: " + bottomRight.getLat(), + validationException); + } + if (GeoUtils.isValidLongitude(bottomRight.getLon()) == false) { + validationException = addValidationError("right longitude is invalid: " + bottomRight.getLon(), + validationException); + } + return validationException; + } + + @Override + public Query doToQuery(QueryShardContext 
context) { + QueryValidationException exception = checkLatLon(context.indexVersionCreated().before(Version.V_2_0_0)); + if (exception != null) { + throw new QueryShardException(context, "couldn't validate latitude/ longitude values", exception); + } + + GeoPoint luceneTopLeft = new GeoPoint(topLeft); + GeoPoint luceneBottomRight = new GeoPoint(bottomRight); + if (GeoValidationMethod.isCoerce(validationMethod)) { + // Special case: if the difference between the left and right is 360 and the right is greater than the left, we are asking for + // the complete longitude range so need to set longitude to the complete longditude range + double right = luceneBottomRight.getLon(); + double left = luceneTopLeft.getLon(); + + boolean completeLonRange = ((right - left) % 360 == 0 && right > left); + GeoUtils.normalizePoint(luceneTopLeft, true, !completeLonRange); + GeoUtils.normalizePoint(luceneBottomRight, true, !completeLonRange); + if (completeLonRange) { + luceneTopLeft.resetLon(-180); + luceneBottomRight.resetLon(180); + } + } + + MappedFieldType fieldType = context.fieldMapper(fieldName); + if (fieldType == null) { + throw new QueryShardException(context, "failed to find geo_point field [" + fieldName + "]"); + } + if (!(fieldType instanceof GeoPointFieldMapper.GeoPointFieldType)) { + throw new QueryShardException(context, "field [" + fieldName + "] is not a geo_point field"); + } + GeoPointFieldMapper.GeoPointFieldType geoFieldType = ((GeoPointFieldMapper.GeoPointFieldType) fieldType); + + Query result; + switch(type) { + case INDEXED: + result = IndexedGeoBoundingBoxQuery.create(luceneTopLeft, luceneBottomRight, geoFieldType); + break; + case MEMORY: + IndexGeoPointFieldData indexFieldData = context.getForField(fieldType); + result = new InMemoryGeoBoundingBoxQuery(luceneTopLeft, luceneBottomRight, indexFieldData); + break; + default: + // Someone extended the type enum w/o adjusting this switch statement. 
+ throw new IllegalStateException("geo bounding box type [" + type + "] not supported."); + } + + return result; + } + @Override protected void doXContent(XContentBuilder builder, Params params) throws IOException { - // check values - if(Double.isNaN(box[TOP])) { - throw new IllegalArgumentException("geo_bounding_box requires top latitude to be set"); - } else if(Double.isNaN(box[BOTTOM])) { - throw new IllegalArgumentException("geo_bounding_box requires bottom latitude to be set"); - } else if(Double.isNaN(box[RIGHT])) { - throw new IllegalArgumentException("geo_bounding_box requires right longitude to be set"); - } else if(Double.isNaN(box[LEFT])) { - throw new IllegalArgumentException("geo_bounding_box requires left longitude to be set"); - } - - builder.startObject(GeoBoundingBoxQueryParser.NAME); + builder.startObject(NAME); - builder.startObject(name); - builder.array(TOP_LEFT, box[LEFT], box[TOP]); - builder.array(BOTTOM_RIGHT, box[RIGHT], box[BOTTOM]); + builder.startObject(fieldName); + builder.array(GeoBoundingBoxQueryParser.TOP_LEFT, topLeft.getLon(), topLeft.getLat()); + builder.array(GeoBoundingBoxQueryParser.BOTTOM_RIGHT, bottomRight.getLon(), bottomRight.getLat()); builder.endObject(); + builder.field("validation_method", validationMethod); + builder.field("type", type); - if (queryName != null) { - builder.field("_name", queryName); - } - if (type != null) { - builder.field("type", type); - } - if (coerce != null) { - builder.field("coerce", coerce); - } - if (ignoreMalformed != null) { - builder.field("ignore_malformed", ignoreMalformed); - } + printBoostAndQueryName(builder); builder.endObject(); } + + @Override + public boolean doEquals(GeoBoundingBoxQueryBuilder other) { + return Objects.equals(topLeft, other.topLeft) && + Objects.equals(bottomRight, other.bottomRight) && + Objects.equals(type, other.type) && + Objects.equals(validationMethod, other.validationMethod) && + Objects.equals(fieldName, other.fieldName); + } + + @Override + public 
int doHashCode() { + return Objects.hash(topLeft, bottomRight, type, validationMethod, fieldName); + } + + @Override + public GeoBoundingBoxQueryBuilder doReadFrom(StreamInput in) throws IOException { + String fieldName = in.readString(); + GeoBoundingBoxQueryBuilder geo = new GeoBoundingBoxQueryBuilder(fieldName); + geo.topLeft = geo.topLeft.readFrom(in); + geo.bottomRight = geo.bottomRight.readFrom(in); + geo.type = GeoExecType.readTypeFrom(in); + geo.validationMethod = GeoValidationMethod.readGeoValidationMethodFrom(in); + return geo; + } + + @Override + public void doWriteTo(StreamOutput out) throws IOException { + out.writeString(fieldName); + topLeft.writeTo(out); + bottomRight.writeTo(out); + type.writeTo(out); + validationMethod.writeTo(out); + } + + @Override + public String getWriteableName() { + return NAME; + } } diff --git a/core/src/main/java/org/elasticsearch/index/query/GeoBoundingBoxQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/GeoBoundingBoxQueryParser.java index 3d5d848f014..afa885324a5 100644 --- a/core/src/main/java/org/elasticsearch/index/query/GeoBoundingBoxQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/GeoBoundingBoxQueryParser.java @@ -19,57 +19,54 @@ package org.elasticsearch.index.query; -import org.apache.lucene.search.Query; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.Version; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.GeoUtils; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.fielddata.IndexGeoPointFieldData; -import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper; -import org.elasticsearch.index.search.geo.InMemoryGeoBoundingBoxQuery; -import org.elasticsearch.index.search.geo.IndexedGeoBoundingBoxQuery; import 
java.io.IOException; -/** - * - */ -public class GeoBoundingBoxQueryParser implements QueryParser { +public class GeoBoundingBoxQueryParser implements QueryParser { public static final String NAME = "geo_bbox"; + /** Key to refer to the top of the bounding box. */ public static final String TOP = "top"; + /** Key to refer to the left of the bounding box. */ public static final String LEFT = "left"; + /** Key to refer to the right of the bounding box. */ public static final String RIGHT = "right"; + /** Key to refer to the bottom of the bounding box. */ public static final String BOTTOM = "bottom"; + /** Key to refer to top_left corner of bounding box. */ public static final String TOP_LEFT = TOP + "_" + LEFT; - public static final String TOP_RIGHT = TOP + "_" + RIGHT; - public static final String BOTTOM_LEFT = BOTTOM + "_" + LEFT; + /** Key to refer to bottom_right corner of bounding box. */ public static final String BOTTOM_RIGHT = BOTTOM + "_" + RIGHT; + /** Key to refer to top_right corner of bounding box. */ + public static final String TOP_RIGHT = TOP + "_" + RIGHT; + /** Key to refer to bottom left corner of bounding box. */ + public static final String BOTTOM_LEFT = BOTTOM + "_" + LEFT; + /** Key to refer to top_left corner of bounding box. */ public static final String TOPLEFT = "topLeft"; - public static final String TOPRIGHT = "topRight"; - public static final String BOTTOMLEFT = "bottomLeft"; + /** Key to refer to bottom_right corner of bounding box. */ public static final String BOTTOMRIGHT = "bottomRight"; + /** Key to refer to top_right corner of bounding box. */ + public static final String TOPRIGHT = "topRight"; + /** Key to refer to bottom left corner of bounding box. 
*/ + public static final String BOTTOMLEFT = "bottomLeft"; public static final String FIELD = "field"; - @Inject - public GeoBoundingBoxQueryParser() { - } - @Override public String[] names() { - return new String[]{NAME, "geoBbox", "geo_bounding_box", "geoBoundingBox"}; + return new String[]{GeoBoundingBoxQueryBuilder.NAME, "geoBbox", "geo_bounding_box", "geoBoundingBox"}; } @Override - public Query parse(QueryParseContext parseContext) throws IOException, ParsingException { + public GeoBoundingBoxQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException { XContentParser parser = parseContext.parser(); String fieldName = null; @@ -78,16 +75,17 @@ public class GeoBoundingBoxQueryParser implements QueryParser { double bottom = Double.NaN; double left = Double.NaN; double right = Double.NaN; - + + float boost = AbstractQueryBuilder.DEFAULT_BOOST; String queryName = null; String currentFieldName = null; XContentParser.Token token; - final boolean indexCreatedBeforeV2_0 = parseContext.indexVersionCreated().before(Version.V_2_0_0); - boolean coerce = false; - boolean ignoreMalformed = false; + boolean coerce = GeoValidationMethod.DEFAULT_LENIENT_PARSING; + boolean ignoreMalformed = GeoValidationMethod.DEFAULT_LENIENT_PARSING; + GeoValidationMethod validationMethod = null; GeoPoint sparse = new GeoPoint(); - + String type = "memory"; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { @@ -140,74 +138,43 @@ public class GeoBoundingBoxQueryParser implements QueryParser { } else if (token.isValue()) { if ("_name".equals(currentFieldName)) { queryName = parser.text(); - } else if ("coerce".equals(currentFieldName) || (indexCreatedBeforeV2_0 && "normalize".equals(currentFieldName))) { + } else if ("boost".equals(currentFieldName)) { + boost = parser.floatValue(); + } else if ("coerce".equals(currentFieldName) || ("normalize".equals(currentFieldName))) { coerce = parser.booleanValue(); - if (coerce == true) { + if (coerce) { 
ignoreMalformed = true; } + } else if ("validation_method".equals(currentFieldName)) { + validationMethod = GeoValidationMethod.fromString(parser.text()); } else if ("type".equals(currentFieldName)) { type = parser.text(); - } else if ("ignore_malformed".equals(currentFieldName) && coerce == false) { + } else if ("ignore_malformed".equals(currentFieldName)) { ignoreMalformed = parser.booleanValue(); } else { - throw new ParsingException(parseContext, "failed to parse [{}] query. unexpected field [{}]", NAME, currentFieldName); + throw new ParsingException(parser.getTokenLocation(), "failed to parse [{}] query. unexpected field [{}]", NAME, currentFieldName); } } } final GeoPoint topLeft = sparse.reset(top, left); //just keep the object final GeoPoint bottomRight = new GeoPoint(bottom, right); - - // validation was not available prior to 2.x, so to support bwc percolation queries we only ignore_malformed on 2.x created indexes - if (!indexCreatedBeforeV2_0 && !ignoreMalformed) { - if (topLeft.lat() > 90.0 || topLeft.lat() < -90.0) { - throw new ParsingException(parseContext, "illegal latitude value [{}] for [{}]", topLeft.lat(), NAME); - } - if (topLeft.lon() > 180.0 || topLeft.lon() < -180) { - throw new ParsingException(parseContext, "illegal longitude value [{}] for [{}]", topLeft.lon(), NAME); - } - if (bottomRight.lat() > 90.0 || bottomRight.lat() < -90.0) { - throw new ParsingException(parseContext, "illegal latitude value [{}] for [{}]", bottomRight.lat(), NAME); - } - if (bottomRight.lon() > 180.0 || bottomRight.lon() < -180) { - throw new ParsingException(parseContext, "illegal longitude value [{}] for [{}]", bottomRight.lon(), NAME); - } - } - - if (coerce) { - // Special case: if the difference between the left and right is 360 and the right is greater than the left, we are asking for - // the complete longitude range so need to set longitude to the complete longditude range - boolean completeLonRange = ((right - left) % 360 == 0 && right > left); - 
GeoUtils.normalizePoint(topLeft, true, !completeLonRange); - GeoUtils.normalizePoint(bottomRight, true, !completeLonRange); - if (completeLonRange) { - topLeft.resetLon(-180); - bottomRight.resetLon(180); - } - } - - MappedFieldType fieldType = parseContext.fieldMapper(fieldName); - if (fieldType == null) { - throw new ParsingException(parseContext, "failed to parse [{}] query. could not find [{}] field [{}]", NAME, GeoPointFieldMapper.CONTENT_TYPE, fieldName); - } - if (!(fieldType instanceof GeoPointFieldMapper.GeoPointFieldType)) { - throw new ParsingException(parseContext, "failed to parse [{}] query. field [{}] is expected to be of type [{}], but is of [{}] type instead", NAME, fieldName, GeoPointFieldMapper.CONTENT_TYPE, fieldType.typeName()); - } - GeoPointFieldMapper.GeoPointFieldType geoFieldType = ((GeoPointFieldMapper.GeoPointFieldType) fieldType); - - Query filter; - if ("indexed".equals(type)) { - filter = IndexedGeoBoundingBoxQuery.create(topLeft, bottomRight, geoFieldType); - } else if ("memory".equals(type)) { - IndexGeoPointFieldData indexFieldData = parseContext.getForField(fieldType); - filter = new InMemoryGeoBoundingBoxQuery(topLeft, bottomRight, indexFieldData); + GeoBoundingBoxQueryBuilder builder = new GeoBoundingBoxQueryBuilder(fieldName); + builder.setCorners(topLeft, bottomRight); + builder.queryName(queryName); + builder.boost(boost); + builder.type(GeoExecType.fromString(type)); + if (validationMethod != null) { + // ignore deprecated coerce/ignoreMalformed settings if validationMethod is set + builder.setValidationMethod(validationMethod); } else { - throw new ParsingException(parseContext, "failed to parse [{}] query. geo bounding box type [{}] is not supported. 
either [indexed] or [memory] are allowed", NAME, type); + builder.setValidationMethod(GeoValidationMethod.infer(coerce, ignoreMalformed)); } + return builder; + } - if (queryName != null) { - parseContext.addNamedQuery(queryName, filter); - } - return filter; - } + @Override + public GeoBoundingBoxQueryBuilder getBuilderPrototype() { + return GeoBoundingBoxQueryBuilder.PROTOTYPE; + } } diff --git a/core/src/main/java/org/elasticsearch/index/query/GeoDistanceQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/GeoDistanceQueryBuilder.java index 77c8f944864..7d6066eb968 100644 --- a/core/src/main/java/org/elasticsearch/index/query/GeoDistanceQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/GeoDistanceQueryBuilder.java @@ -19,122 +19,283 @@ package org.elasticsearch.index.query; +import org.apache.lucene.search.Query; +import org.elasticsearch.Version; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.geo.GeoDistance; +import org.elasticsearch.common.geo.GeoPoint; +import org.elasticsearch.common.geo.GeoUtils; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.unit.DistanceUnit; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.fielddata.IndexGeoPointFieldData; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper; +import org.elasticsearch.index.search.geo.GeoDistanceRangeQuery; import java.io.IOException; import java.util.Locale; +import java.util.Objects; -public class GeoDistanceQueryBuilder extends QueryBuilder { +/** + * Filter results of a query to include only those within a specific distance to some + * geo point. + * */ +public class GeoDistanceQueryBuilder extends AbstractQueryBuilder { - private final String name; + /** Name of the query in the query dsl. 
*/ + public static final String NAME = "geo_distance"; + /** Default for latitude normalization (as of this writing true).*/ + public static final boolean DEFAULT_NORMALIZE_LAT = true; + /** Default for longitude normalization (as of this writing true). */ + public static final boolean DEFAULT_NORMALIZE_LON = true; + /** Default for distance unit computation. */ + public static final DistanceUnit DEFAULT_DISTANCE_UNIT = DistanceUnit.DEFAULT; + /** Default for geo distance computation. */ + public static final GeoDistance DEFAULT_GEO_DISTANCE = GeoDistance.DEFAULT; + /** Default for optimising query through pre computed bounding box query. */ + public static final String DEFAULT_OPTIMIZE_BBOX = "memory"; - private String distance; + private final String fieldName; + /** Distance from center to cover. */ + private double distance; + /** Point to use as center. */ + private GeoPoint center = new GeoPoint(Double.NaN, Double.NaN); + /** Algorithm to use for distance computation. */ + private GeoDistance geoDistance = DEFAULT_GEO_DISTANCE; + /** Whether or not to use a bbox for pre-filtering. TODO change to enum? */ + private String optimizeBbox = DEFAULT_OPTIMIZE_BBOX; + /** How strict should geo coordinate validation be? */ + private GeoValidationMethod validationMethod = GeoValidationMethod.DEFAULT; - private double lat; + static final GeoDistanceQueryBuilder PROTOTYPE = new GeoDistanceQueryBuilder("_na_"); - private double lon; - - private String geohash; - - private GeoDistance geoDistance; - - private String optimizeBbox; - - private String queryName; - - private Boolean coerce; - - private Boolean ignoreMalformed; - - public GeoDistanceQueryBuilder(String name) { - this.name = name; + /** + * Construct new GeoDistanceQueryBuilder. + * @param fieldName name of indexed geo field to operate distance computation on. 
+ * */ + public GeoDistanceQueryBuilder(String fieldName) { + if (Strings.isEmpty(fieldName)) { + throw new IllegalArgumentException("fieldName must not be null or empty"); + } + this.fieldName = fieldName; } + /** Name of the field this query is operating on. */ + public String fieldName() { + return this.fieldName; + } + + /** Sets the center point for the query. + * @param point the center of the query + **/ + public GeoDistanceQueryBuilder point(GeoPoint point) { + if (point == null) { + throw new IllegalArgumentException("center point must not be null"); + } + this.center = point; + return this; + } + + /** + * Sets the center point of the query. + * @param lat latitude of center + * @param lon longitude of center + * */ public GeoDistanceQueryBuilder point(double lat, double lon) { - this.lat = lat; - this.lon = lon; + this.center = new GeoPoint(lat, lon); return this; } - public GeoDistanceQueryBuilder lat(double lat) { - this.lat = lat; - return this; - } - - public GeoDistanceQueryBuilder lon(double lon) { - this.lon = lon; - return this; + /** Returns the center point of the distance query. */ + public GeoPoint point() { + return this.center; } + /** Sets the distance from the center using the default distance unit.*/ public GeoDistanceQueryBuilder distance(String distance) { - this.distance = distance; + return distance(distance, DistanceUnit.DEFAULT); + } + + /** Sets the distance from the center for this query. */ + public GeoDistanceQueryBuilder distance(String distance, DistanceUnit unit) { + if (Strings.isEmpty(distance)) { + throw new IllegalArgumentException("distance must not be null or empty"); + } + if (unit == null) { + throw new IllegalArgumentException("distance unit must not be null"); + } + this.distance = DistanceUnit.parse(distance, unit, DistanceUnit.DEFAULT); return this; } + /** Sets the distance from the center for this query. 
*/ public GeoDistanceQueryBuilder distance(double distance, DistanceUnit unit) { - this.distance = unit.toString(distance); - return this; + return distance(Double.toString(distance), unit); } + /** Returns the distance configured as radius. */ + public double distance() { + return distance; + } + + /** Sets the center point for this query. */ public GeoDistanceQueryBuilder geohash(String geohash) { - this.geohash = geohash; + if (Strings.isEmpty(geohash)) { + throw new IllegalArgumentException("geohash must not be null or empty"); + } + this.center.resetFromGeoHash(geohash); return this; } + /** Which type of geo distance calculation method to use. */ public GeoDistanceQueryBuilder geoDistance(GeoDistance geoDistance) { + if (geoDistance == null) { + throw new IllegalArgumentException("geoDistance must not be null"); + } this.geoDistance = geoDistance; return this; } + /** Returns geo distance calculation type to use. */ + public GeoDistance geoDistance() { + return this.geoDistance; + } + + /** + * Set this to memory or indexed if before running the distance + * calculation you want to limit the candidates to hits in the + * enclosing bounding box. + **/ public GeoDistanceQueryBuilder optimizeBbox(String optimizeBbox) { + if (optimizeBbox == null) { + throw new IllegalArgumentException("optimizeBox must not be null"); + } + switch (optimizeBbox) { + case "none": + case "memory": + case "indexed": + break; + default: + throw new IllegalArgumentException("optimizeBbox must be one of [none, memory, indexed]"); + } this.optimizeBbox = optimizeBbox; return this; } /** - * Sets the filter name for the filter that can be used when searching for matched_filters per hit. 
- */ - public GeoDistanceQueryBuilder queryName(String queryName) { - this.queryName = queryName; - return this; + * Returns whether or not to run a BoundingBox query prior to + * distance query for optimization purposes.*/ + public String optimizeBbox() { + return this.optimizeBbox; } - public GeoDistanceQueryBuilder coerce(boolean coerce) { - this.coerce = coerce; - return this; + /** Set validaton method for geo coordinates. */ + public void setValidationMethod(GeoValidationMethod method) { + this.validationMethod = method; } - public GeoDistanceQueryBuilder ignoreMalformed(boolean ignoreMalformed) { - this.ignoreMalformed = ignoreMalformed; - return this; + /** Returns validation method for geo coordinates. */ + public GeoValidationMethod getValidationMethod() { + return this.validationMethod; + } + + @Override + protected Query doToQuery(QueryShardContext shardContext) throws IOException { + QueryValidationException exception = checkLatLon(shardContext.indexVersionCreated().before(Version.V_2_0_0)); + if (exception != null) { + throw new QueryShardException(shardContext, "couldn't validate latitude/ longitude values", exception); + } + + if (GeoValidationMethod.isCoerce(validationMethod)) { + GeoUtils.normalizePoint(center, true, true); + } + + double normDistance = geoDistance.normalize(this.distance, DistanceUnit.DEFAULT); + + MappedFieldType fieldType = shardContext.fieldMapper(fieldName); + if (fieldType == null) { + throw new QueryShardException(shardContext, "failed to find geo_point field [" + fieldName + "]"); + } + if (!(fieldType instanceof GeoPointFieldMapper.GeoPointFieldType)) { + throw new QueryShardException(shardContext, "field [" + fieldName + "] is not a geo_point field"); + } + GeoPointFieldMapper.GeoPointFieldType geoFieldType = ((GeoPointFieldMapper.GeoPointFieldType) fieldType); + + IndexGeoPointFieldData indexFieldData = shardContext.getForField(fieldType); + Query query = new GeoDistanceRangeQuery(center, null, normDistance, true, 
false, geoDistance, geoFieldType, indexFieldData, optimizeBbox); + return query; } @Override protected void doXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(GeoDistanceQueryParser.NAME); - if (geohash != null) { - builder.field(name, geohash); - } else { - builder.startArray(name).value(lon).value(lat).endArray(); - } + builder.startObject(NAME); + builder.startArray(fieldName).value(center.lon()).value(center.lat()).endArray(); builder.field("distance", distance); - if (geoDistance != null) { - builder.field("distance_type", geoDistance.name().toLowerCase(Locale.ROOT)); - } - if (optimizeBbox != null) { - builder.field("optimize_bbox", optimizeBbox); - } - if (queryName != null) { - builder.field("_name", queryName); - } - if (coerce != null) { - builder.field("coerce", coerce); - } - if (ignoreMalformed != null) { - builder.field("ignore_malformed", ignoreMalformed); - } + builder.field("distance_type", geoDistance.name().toLowerCase(Locale.ROOT)); + builder.field("optimize_bbox", optimizeBbox); + builder.field("validation_method", validationMethod); + printBoostAndQueryName(builder); builder.endObject(); } + + @Override + public int doHashCode() { + return Objects.hash(fieldName, center, geoDistance, optimizeBbox, distance, validationMethod); + } + + @Override + public boolean doEquals(GeoDistanceQueryBuilder other) { + return Objects.equals(fieldName, other.fieldName) && + (distance == other.distance) && + Objects.equals(validationMethod, other.validationMethod) && + Objects.equals(center, other.center) && + Objects.equals(optimizeBbox, other.optimizeBbox) && + Objects.equals(geoDistance, other.geoDistance); + } + + @Override + protected GeoDistanceQueryBuilder doReadFrom(StreamInput in) throws IOException { + String fieldName = in.readString(); + GeoDistanceQueryBuilder result = new GeoDistanceQueryBuilder(fieldName); + result.distance = in.readDouble(); + result.validationMethod =
GeoValidationMethod.readGeoValidationMethodFrom(in); + result.center = GeoPoint.readGeoPointFrom(in); + result.optimizeBbox = in.readString(); + result.geoDistance = GeoDistance.readGeoDistanceFrom(in); + return result; + } + + @Override + protected void doWriteTo(StreamOutput out) throws IOException { + out.writeString(fieldName); + out.writeDouble(distance); + validationMethod.writeTo(out); + center.writeTo(out); + out.writeString(optimizeBbox); + geoDistance.writeTo(out); + } + + private QueryValidationException checkLatLon(boolean indexCreatedBeforeV2_0) { + // validation was not available prior to 2.x, so to support bwc percolation queries we only ignore_malformed on 2.x created indexes + if (GeoValidationMethod.isIgnoreMalformed(validationMethod) || indexCreatedBeforeV2_0) { + return null; + } + + QueryValidationException validationException = null; + // For everything post 2.0, validate latitude and longitude unless validation was explicitly turned off + if (GeoUtils.isValidLatitude(center.getLat()) == false) { + validationException = addValidationError("center point latitude is invalid: " + center.getLat(), validationException); + } + if (GeoUtils.isValidLongitude(center.getLon()) == false) { + validationException = addValidationError("center point longitude is invalid: " + center.getLon(), validationException); + } + return validationException; + } + + @Override + public String getWriteableName() { + return NAME; + } } diff --git a/core/src/main/java/org/elasticsearch/index/query/GeoDistanceQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/GeoDistanceQueryParser.java index e0514f4e693..da9a7c2f07e 100644 --- a/core/src/main/java/org/elasticsearch/index/query/GeoDistanceQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/GeoDistanceQueryParser.java @@ -19,23 +19,19 @@ package org.elasticsearch.index.query; -import org.apache.lucene.search.Query; -import org.elasticsearch.Version; import 
org.elasticsearch.common.ParsingException; import org.elasticsearch.common.geo.GeoDistance; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.GeoUtils; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.unit.DistanceUnit; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.fielddata.IndexGeoPointFieldData; -import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper; -import org.elasticsearch.index.search.geo.GeoDistanceRangeQuery; import java.io.IOException; /** + * Parses a GeoDistanceQuery. See also + * *

  * {
  *     "name.lat" : 1.1,
@@ -43,37 +39,32 @@ import java.io.IOException;
  * }
  * 
*/ -public class GeoDistanceQueryParser implements QueryParser { - - public static final String NAME = "geo_distance"; - - @Inject - public GeoDistanceQueryParser() { - } +public class GeoDistanceQueryParser implements QueryParser { @Override public String[] names() { - return new String[]{NAME, "geoDistance"}; + return new String[]{GeoDistanceQueryBuilder.NAME, "geoDistance"}; } @Override - public Query parse(QueryParseContext parseContext) throws IOException, ParsingException { + public GeoDistanceQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException { XContentParser parser = parseContext.parser(); XContentParser.Token token; + float boost = AbstractQueryBuilder.DEFAULT_BOOST; String queryName = null; String currentFieldName = null; - GeoPoint point = new GeoPoint(); + GeoPoint point = new GeoPoint(Double.NaN, Double.NaN); String fieldName = null; - double distance = 0; Object vDistance = null; - DistanceUnit unit = DistanceUnit.DEFAULT; - GeoDistance geoDistance = GeoDistance.DEFAULT; - String optimizeBbox = "memory"; - final boolean indexCreatedBeforeV2_0 = parseContext.indexVersionCreated().before(Version.V_2_0_0); - boolean coerce = false; - boolean ignoreMalformed = false; + DistanceUnit unit = GeoDistanceQueryBuilder.DEFAULT_DISTANCE_UNIT; + GeoDistance geoDistance = GeoDistanceQueryBuilder.DEFAULT_GEO_DISTANCE; + String optimizeBbox = GeoDistanceQueryBuilder.DEFAULT_OPTIMIZE_BBOX; + boolean coerce = GeoValidationMethod.DEFAULT_LENIENT_PARSING; + boolean ignoreMalformed = GeoValidationMethod.DEFAULT_LENIENT_PARSING; + GeoValidationMethod validationMethod = null; + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); @@ -85,6 +76,7 @@ public class GeoDistanceQueryParser implements QueryParser { } else if (token == XContentParser.Token.START_OBJECT) { // the json in the format of -> field : { lat : 30, lon : 12 } String 
currentName = parser.currentName(); + assert currentFieldName != null; fieldName = currentFieldName; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { @@ -97,21 +89,21 @@ public class GeoDistanceQueryParser implements QueryParser { } else if (currentName.equals(GeoPointFieldMapper.Names.GEOHASH)) { point.resetFromGeoHash(parser.text()); } else { - throw new ParsingException(parseContext, "[geo_distance] query does not support [" + currentFieldName + throw new ParsingException(parser.getTokenLocation(), "[geo_distance] query does not support [" + currentFieldName + "]"); } } } } else if (token.isValue()) { - if (currentFieldName.equals("distance")) { + if ("distance".equals(currentFieldName)) { if (token == XContentParser.Token.VALUE_STRING) { vDistance = parser.text(); // a String } else { vDistance = parser.numberValue(); // a Number } - } else if (currentFieldName.equals("unit")) { + } else if ("unit".equals(currentFieldName)) { unit = DistanceUnit.fromString(parser.text()); - } else if (currentFieldName.equals("distance_type") || currentFieldName.equals("distanceType")) { + } else if ("distance_type".equals(currentFieldName) || "distanceType".equals(currentFieldName)) { geoDistance = GeoDistance.fromString(parser.text()); } else if (currentFieldName.endsWith(GeoPointFieldMapper.Names.LAT_SUFFIX)) { point.resetLat(parser.doubleValue()); @@ -124,15 +116,19 @@ public class GeoDistanceQueryParser implements QueryParser { fieldName = currentFieldName.substring(0, currentFieldName.length() - GeoPointFieldMapper.Names.GEOHASH_SUFFIX.length()); } else if ("_name".equals(currentFieldName)) { queryName = parser.text(); + } else if ("boost".equals(currentFieldName)) { + boost = parser.floatValue(); } else if ("optimize_bbox".equals(currentFieldName) || "optimizeBbox".equals(currentFieldName)) { optimizeBbox = parser.textOrNull(); - } else if ("coerce".equals(currentFieldName) || (indexCreatedBeforeV2_0 
&& "normalize".equals(currentFieldName))) { + } else if ("coerce".equals(currentFieldName) || ("normalize".equals(currentFieldName))) { coerce = parser.booleanValue(); if (coerce == true) { ignoreMalformed = true; } - } else if ("ignore_malformed".equals(currentFieldName) && coerce == false) { + } else if ("ignore_malformed".equals(currentFieldName)) { ignoreMalformed = parser.booleanValue(); + } else if ("validation_method".equals(currentFieldName)) { + validationMethod = GeoValidationMethod.fromString(parser.text()); } else { point.resetFromString(parser.text()); fieldName = currentFieldName; @@ -140,44 +136,31 @@ public class GeoDistanceQueryParser implements QueryParser { } } - // validation was not available prior to 2.x, so to support bwc percolation queries we only ignore_malformed on 2.x created indexes - if (!indexCreatedBeforeV2_0 && !ignoreMalformed) { - if (point.lat() > 90.0 || point.lat() < -90.0) { - throw new ParsingException(parseContext, "illegal latitude value [{}] for [{}]", point.lat(), NAME); - } - if (point.lon() > 180.0 || point.lon() < -180) { - throw new ParsingException(parseContext, "illegal longitude value [{}] for [{}]", point.lon(), NAME); - } - } - - if (coerce) { - GeoUtils.normalizePoint(point, coerce, coerce); - } - if (vDistance == null) { - throw new ParsingException(parseContext, "geo_distance requires 'distance' to be specified"); - } else if (vDistance instanceof Number) { - distance = DistanceUnit.DEFAULT.convert(((Number) vDistance).doubleValue(), unit); + throw new ParsingException(parser.getTokenLocation(), "geo_distance requires 'distance' to be specified"); + } + + GeoDistanceQueryBuilder qb = new GeoDistanceQueryBuilder(fieldName); + if (vDistance instanceof Number) { + qb.distance(((Number) vDistance).doubleValue(), unit); } else { - distance = DistanceUnit.parse((String) vDistance, unit, DistanceUnit.DEFAULT); + qb.distance((String) vDistance, unit); } - distance = geoDistance.normalize(distance, 
DistanceUnit.DEFAULT); + qb.point(point); + if (validationMethod != null) { + qb.setValidationMethod(validationMethod); + } else { + qb.setValidationMethod(GeoValidationMethod.infer(coerce, ignoreMalformed)); + } + qb.optimizeBbox(optimizeBbox); + qb.geoDistance(geoDistance); + qb.boost(boost); + qb.queryName(queryName); + return qb; + } - MappedFieldType fieldType = parseContext.fieldMapper(fieldName); - if (fieldType == null) { - throw new ParsingException(parseContext, "failed to find geo_point field [" + fieldName + "]"); - } - if (!(fieldType instanceof GeoPointFieldMapper.GeoPointFieldType)) { - throw new ParsingException(parseContext, "field [" + fieldName + "] is not a geo_point field"); - } - GeoPointFieldMapper.GeoPointFieldType geoFieldType = ((GeoPointFieldMapper.GeoPointFieldType) fieldType); - - - IndexGeoPointFieldData indexFieldData = parseContext.getForField(fieldType); - Query query = new GeoDistanceRangeQuery(point, null, distance, true, false, geoDistance, geoFieldType, indexFieldData, optimizeBbox); - if (queryName != null) { - parseContext.addNamedQuery(queryName, query); - } - return query; + @Override + public GeoDistanceQueryBuilder getBuilderPrototype() { + return GeoDistanceQueryBuilder.PROTOTYPE; } } diff --git a/core/src/main/java/org/elasticsearch/index/query/GeoDistanceRangeQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/GeoDistanceRangeQueryBuilder.java index 6aa6f0fd9d7..9a224fa260c 100644 --- a/core/src/main/java/org/elasticsearch/index/query/GeoDistanceRangeQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/GeoDistanceRangeQueryBuilder.java @@ -19,161 +19,309 @@ package org.elasticsearch.index.query; +import org.apache.lucene.search.Query; +import org.elasticsearch.Version; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.geo.GeoDistance; +import org.elasticsearch.common.geo.GeoPoint; +import org.elasticsearch.common.geo.GeoUtils; +import 
org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.DistanceUnit; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.fielddata.IndexGeoPointFieldData; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper; +import org.elasticsearch.index.search.geo.GeoDistanceRangeQuery; import java.io.IOException; import java.util.Locale; +import java.util.Objects; -public class GeoDistanceRangeQueryBuilder extends QueryBuilder { +public class GeoDistanceRangeQueryBuilder extends AbstractQueryBuilder { - private final String name; + public static final String NAME = "geo_distance_range"; + public static final boolean DEFAULT_INCLUDE_LOWER = true; + public static final boolean DEFAULT_INCLUDE_UPPER = true; + public static final GeoDistance DEFAULT_GEO_DISTANCE = GeoDistance.DEFAULT; + public static final DistanceUnit DEFAULT_UNIT = DistanceUnit.DEFAULT; + public static final String DEFAULT_OPTIMIZE_BBOX = "memory"; + + private final String fieldName; private Object from; private Object to; - private boolean includeLower = true; - private boolean includeUpper = true; + private boolean includeLower = DEFAULT_INCLUDE_LOWER; + private boolean includeUpper = DEFAULT_INCLUDE_UPPER; - private double lat; + private final GeoPoint point; - private double lon; + private GeoDistance geoDistance = DEFAULT_GEO_DISTANCE; - private String geohash; + private DistanceUnit unit = DEFAULT_UNIT; - private GeoDistance geoDistance; + private String optimizeBbox = DEFAULT_OPTIMIZE_BBOX; - private String queryName; + private GeoValidationMethod validationMethod = GeoValidationMethod.DEFAULT; - private String optimizeBbox; + static final GeoDistanceRangeQueryBuilder PROTOTYPE = new GeoDistanceRangeQueryBuilder("_na_", new GeoPoint()); - private Boolean coerce; - - private Boolean ignoreMalformed; - - public 
GeoDistanceRangeQueryBuilder(String name) { - this.name = name; + public GeoDistanceRangeQueryBuilder(String fieldName, GeoPoint point) { + if (Strings.isEmpty(fieldName)) { + throw new IllegalArgumentException("fieldName must not be null"); + } + if (point == null) { + throw new IllegalArgumentException("point must not be null"); + } + this.fieldName = fieldName; + this.point = point; } - public GeoDistanceRangeQueryBuilder point(double lat, double lon) { - this.lat = lat; - this.lon = lon; - return this; + public GeoDistanceRangeQueryBuilder(String fieldName, double lat, double lon) { + this(fieldName, new GeoPoint(lat, lon)); } - public GeoDistanceRangeQueryBuilder lat(double lat) { - this.lat = lat; - return this; + public GeoDistanceRangeQueryBuilder(String fieldName, String geohash) { + this(fieldName, geohash == null ? null : new GeoPoint().resetFromGeoHash(geohash)); } - public GeoDistanceRangeQueryBuilder lon(double lon) { - this.lon = lon; - return this; + public String fieldName() { + return fieldName; } - public GeoDistanceRangeQueryBuilder from(Object from) { + public GeoPoint point() { + return point; + } + + public GeoDistanceRangeQueryBuilder from(String from) { + if (from == null) { + throw new IllegalArgumentException("[from] must not be null"); + } this.from = from; return this; } - public GeoDistanceRangeQueryBuilder to(Object to) { - this.to = to; - return this; - } - - public GeoDistanceRangeQueryBuilder gt(Object from) { + public GeoDistanceRangeQueryBuilder from(Number from) { + if (from == null) { + throw new IllegalArgumentException("[from] must not be null"); + } this.from = from; - this.includeLower = false; return this; } - public GeoDistanceRangeQueryBuilder gte(Object from) { - this.from = from; - this.includeLower = true; - return this; + public Object from() { + return from; } - public GeoDistanceRangeQueryBuilder lt(Object to) { + public GeoDistanceRangeQueryBuilder to(String to) { + if (to == null) { + throw new 
IllegalArgumentException("[to] must not be null"); + } this.to = to; - this.includeUpper = false; return this; } - public GeoDistanceRangeQueryBuilder lte(Object to) { + public GeoDistanceRangeQueryBuilder to(Number to) { + if (to == null) { + throw new IllegalArgumentException("[to] must not be null"); + } this.to = to; - this.includeUpper = true; return this; } + public Object to() { + return to; + } + public GeoDistanceRangeQueryBuilder includeLower(boolean includeLower) { this.includeLower = includeLower; return this; } + public boolean includeLower() { + return includeLower; + } + public GeoDistanceRangeQueryBuilder includeUpper(boolean includeUpper) { this.includeUpper = includeUpper; return this; } - public GeoDistanceRangeQueryBuilder geohash(String geohash) { - this.geohash = geohash; - return this; + public boolean includeUpper() { + return includeUpper; } public GeoDistanceRangeQueryBuilder geoDistance(GeoDistance geoDistance) { + if (geoDistance == null) { + throw new IllegalArgumentException("geoDistance calculation mode must not be null"); + } this.geoDistance = geoDistance; return this; } + public GeoDistance geoDistance() { + return geoDistance; + } + + public GeoDistanceRangeQueryBuilder unit(DistanceUnit unit) { + if (unit == null) { + throw new IllegalArgumentException("distance unit must not be null"); + } + this.unit = unit; + return this; + } + + public DistanceUnit unit() { + return unit; + } + public GeoDistanceRangeQueryBuilder optimizeBbox(String optimizeBbox) { + if (optimizeBbox == null) { + throw new IllegalArgumentException("optimizeBbox must not be null"); + } + switch (optimizeBbox) { + case "none": + case "memory": + case "indexed": + break; + default: + throw new IllegalArgumentException("optimizeBbox must be one of [none, memory, indexed]"); + } this.optimizeBbox = optimizeBbox; return this; } - public GeoDistanceRangeQueryBuilder coerce(boolean coerce) { - this.coerce = coerce; - return this; + public String optimizeBbox() { +
return optimizeBbox; } - public GeoDistanceRangeQueryBuilder ignoreMalformed(boolean ignoreMalformed) { - this.ignoreMalformed = ignoreMalformed; + /** Set validation method for coordinates. */ + public GeoDistanceRangeQueryBuilder setValidationMethod(GeoValidationMethod method) { + this.validationMethod = method; return this; } + + /** Returns validation method for coordinates. */ + public GeoValidationMethod getValidationMethod() { + return this.validationMethod; + } - /** - * Sets the filter name for the filter that can be used when searching for matched_filters per hit. - */ - public GeoDistanceRangeQueryBuilder queryName(String queryName) { - this.queryName = queryName; - return this; + @Override + protected Query doToQuery(QueryShardContext context) throws IOException { + + final boolean indexCreatedBeforeV2_0 = context.indexVersionCreated().before(Version.V_2_0_0); + // validation was not available prior to 2.x, so to support bwc + // percolation queries we only ignore_malformed on 2.x created indexes + if (!indexCreatedBeforeV2_0 && !GeoValidationMethod.isIgnoreMalformed(validationMethod)) { + if (!GeoUtils.isValidLatitude(point.lat())) { + throw new QueryShardException(context, "illegal latitude value [{}] for [{}]", point.lat(), NAME); + } + if (!GeoUtils.isValidLongitude(point.lon())) { + throw new QueryShardException(context, "illegal longitude value [{}] for [{}]", point.lon(), NAME); + } + } + + if (GeoValidationMethod.isCoerce(validationMethod)) { + GeoUtils.normalizePoint(point, true, true); + } + + Double fromValue = null; + Double toValue = null; + if (from != null) { + if (from instanceof Number) { + fromValue = unit.toMeters(((Number) from).doubleValue()); + } else { + fromValue = DistanceUnit.parse((String) from, unit, DistanceUnit.DEFAULT); + } + fromValue = geoDistance.normalize(fromValue, DistanceUnit.DEFAULT); + } + if (to != null) { + if (to instanceof Number) { + toValue = unit.toMeters(((Number)
to).doubleValue()); + } else { + toValue = DistanceUnit.parse((String) to, unit, DistanceUnit.DEFAULT); + } + toValue = geoDistance.normalize(toValue, DistanceUnit.DEFAULT); + } + + MappedFieldType fieldType = context.fieldMapper(fieldName); + if (fieldType == null) { + throw new QueryShardException(context, "failed to find geo_point field [" + fieldName + "]"); + } + if (!(fieldType instanceof GeoPointFieldMapper.GeoPointFieldType)) { + throw new QueryShardException(context, "field [" + fieldName + "] is not a geo_point field"); + } + GeoPointFieldMapper.GeoPointFieldType geoFieldType = ((GeoPointFieldMapper.GeoPointFieldType) fieldType); + + IndexGeoPointFieldData indexFieldData = context.getForField(fieldType); + return new GeoDistanceRangeQuery(point, fromValue, toValue, includeLower, includeUpper, geoDistance, geoFieldType, + indexFieldData, optimizeBbox); } @Override protected void doXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(GeoDistanceRangeQueryParser.NAME); - if (geohash != null) { - builder.field(name, geohash); - } else { - builder.startArray(name).value(lon).value(lat).endArray(); - } - builder.field("from", from); - builder.field("to", to); - builder.field("include_lower", includeLower); - builder.field("include_upper", includeUpper); - if (geoDistance != null) { - builder.field("distance_type", geoDistance.name().toLowerCase(Locale.ROOT)); - } - if (optimizeBbox != null) { - builder.field("optimize_bbox", optimizeBbox); - } - if (queryName != null) { - builder.field("_name", queryName); - } - if (coerce != null) { - builder.field("coerce", coerce); - } - if (ignoreMalformed != null) { - builder.field("ignore_malformed", ignoreMalformed); - } + builder.startObject(NAME); + builder.startArray(fieldName).value(point.lon()).value(point.lat()).endArray(); + builder.field(GeoDistanceRangeQueryParser.FROM_FIELD.getPreferredName(), from); + builder.field(GeoDistanceRangeQueryParser.TO_FIELD.getPreferredName(), 
to); + builder.field(GeoDistanceRangeQueryParser.INCLUDE_LOWER_FIELD.getPreferredName(), includeLower); + builder.field(GeoDistanceRangeQueryParser.INCLUDE_UPPER_FIELD.getPreferredName(), includeUpper); + builder.field(GeoDistanceRangeQueryParser.UNIT_FIELD.getPreferredName(), unit); + builder.field(GeoDistanceRangeQueryParser.DISTANCE_TYPE_FIELD.getPreferredName(), geoDistance.name().toLowerCase(Locale.ROOT)); + builder.field(GeoDistanceRangeQueryParser.OPTIMIZE_BBOX_FIELD.getPreferredName(), optimizeBbox); + builder.field(GeoDistanceRangeQueryParser.VALIDATION_METHOD.getPreferredName(), validationMethod); + printBoostAndQueryName(builder); builder.endObject(); } + + @Override + protected GeoDistanceRangeQueryBuilder doReadFrom(StreamInput in) throws IOException { + GeoDistanceRangeQueryBuilder queryBuilder = new GeoDistanceRangeQueryBuilder(in.readString(), GeoPoint.readGeoPointFrom(in)); + queryBuilder.from = in.readGenericValue(); + queryBuilder.to = in.readGenericValue(); + queryBuilder.includeLower = in.readBoolean(); + queryBuilder.includeUpper = in.readBoolean(); + queryBuilder.unit = DistanceUnit.valueOf(in.readString()); + queryBuilder.geoDistance = GeoDistance.readGeoDistanceFrom(in); + queryBuilder.optimizeBbox = in.readString(); + queryBuilder.validationMethod = GeoValidationMethod.readGeoValidationMethodFrom(in); + return queryBuilder; + } + + @Override + protected void doWriteTo(StreamOutput out) throws IOException { + out.writeString(fieldName); + point.writeTo(out); + out.writeGenericValue(from); + out.writeGenericValue(to); + out.writeBoolean(includeLower); + out.writeBoolean(includeUpper); + out.writeString(unit.name()); + geoDistance.writeTo(out); + out.writeString(optimizeBbox); + validationMethod.writeTo(out); + } + + @Override + protected boolean doEquals(GeoDistanceRangeQueryBuilder other) { + return ((Objects.equals(fieldName, other.fieldName)) && + (Objects.equals(point, other.point)) && + (Objects.equals(from, other.from)) && +
(Objects.equals(to, other.to)) && + (Objects.equals(includeUpper, other.includeUpper)) && + (Objects.equals(includeLower, other.includeLower)) && + (Objects.equals(geoDistance, other.geoDistance)) && + (Objects.equals(optimizeBbox, other.optimizeBbox)) && + (Objects.equals(validationMethod, other.validationMethod))); + } + + @Override + protected int doHashCode() { + return Objects.hash(fieldName, point, from, to, includeUpper, includeLower, geoDistance, optimizeBbox, validationMethod); + } + + @Override + public String getWriteableName() { + return NAME; + } } diff --git a/core/src/main/java/org/elasticsearch/index/query/GeoDistanceRangeQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/GeoDistanceRangeQueryParser.java index 39a0adf1cd1..4762e6f2428 100644 --- a/core/src/main/java/org/elasticsearch/index/query/GeoDistanceRangeQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/GeoDistanceRangeQueryParser.java @@ -19,19 +19,13 @@ package org.elasticsearch.index.query; -import org.apache.lucene.search.Query; -import org.elasticsearch.Version; -import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.ParseField; import org.elasticsearch.common.geo.GeoDistance; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.GeoUtils; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.unit.DistanceUnit; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.fielddata.IndexGeoPointFieldData; -import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper; -import org.elasticsearch.index.search.geo.GeoDistanceRangeQuery; import java.io.IOException; @@ -43,71 +37,95 @@ import java.io.IOException; * } * */ -public class GeoDistanceRangeQueryParser implements QueryParser { +public class GeoDistanceRangeQueryParser implements QueryParser { - public static final String NAME = 
"geo_distance_range"; - - @Inject - public GeoDistanceRangeQueryParser() { - } + public static final ParseField FROM_FIELD = new ParseField("from"); + public static final ParseField TO_FIELD = new ParseField("to"); + public static final ParseField INCLUDE_LOWER_FIELD = new ParseField("include_lower"); + public static final ParseField INCLUDE_UPPER_FIELD = new ParseField("include_upper"); + public static final ParseField GT_FIELD = new ParseField("gt"); + public static final ParseField GTE_FIELD = new ParseField("gte", "ge"); + public static final ParseField LT_FIELD = new ParseField("lt"); + public static final ParseField LTE_FIELD = new ParseField("lte", "le"); + public static final ParseField UNIT_FIELD = new ParseField("unit"); + public static final ParseField DISTANCE_TYPE_FIELD = new ParseField("distance_type"); + public static final ParseField NAME_FIELD = new ParseField("_name"); + public static final ParseField BOOST_FIELD = new ParseField("boost"); + public static final ParseField OPTIMIZE_BBOX_FIELD = new ParseField("optimize_bbox"); + public static final ParseField COERCE_FIELD = new ParseField("coerce", "normalize"); + public static final ParseField IGNORE_MALFORMED_FIELD = new ParseField("ignore_malformed"); + public static final ParseField VALIDATION_METHOD = new ParseField("validation_method"); @Override public String[] names() { - return new String[]{NAME, "geoDistanceRange"}; + return new String[]{GeoDistanceRangeQueryBuilder.NAME, "geoDistanceRange"}; } @Override - public Query parse(QueryParseContext parseContext) throws IOException, ParsingException { + public GeoDistanceRangeQueryBuilder getBuilderPrototype() { + return GeoDistanceRangeQueryBuilder.PROTOTYPE; + } + + @Override + public GeoDistanceRangeQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException { XContentParser parser = parseContext.parser(); XContentParser.Token token; + Float boost = null; String queryName = null; String currentFieldName = null; - GeoPoint 
point = new GeoPoint(); + GeoPoint point = null; String fieldName = null; Object vFrom = null; Object vTo = null; - boolean includeLower = true; - boolean includeUpper = true; - DistanceUnit unit = DistanceUnit.DEFAULT; - GeoDistance geoDistance = GeoDistance.DEFAULT; - String optimizeBbox = "memory"; - final boolean indexCreatedBeforeV2_0 = parseContext.indexVersionCreated().before(Version.V_2_0_0); - boolean coerce = false; - boolean ignoreMalformed = false; + Boolean includeLower = null; + Boolean includeUpper = null; + DistanceUnit unit = null; + GeoDistance geoDistance = null; + String optimizeBbox = null; + boolean coerce = GeoValidationMethod.DEFAULT_LENIENT_PARSING; + boolean ignoreMalformed = GeoValidationMethod.DEFAULT_LENIENT_PARSING; + GeoValidationMethod validationMethod = null; + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); } else if (parseContext.isDeprecatedSetting(currentFieldName)) { // skip } else if (token == XContentParser.Token.START_ARRAY) { + if (point == null) { + point = new GeoPoint(); + } GeoUtils.parseGeoPoint(parser, point); fieldName = currentFieldName; } else if (token == XContentParser.Token.START_OBJECT) { // the json in the format of -> field : { lat : 30, lon : 12 } fieldName = currentFieldName; + if (point == null) { + point = new GeoPoint(); + } GeoUtils.parseGeoPoint(parser, point); } else if (token.isValue()) { - if (currentFieldName.equals("from")) { + if (parseContext.parseFieldMatcher().match(currentFieldName, FROM_FIELD)) { if (token == XContentParser.Token.VALUE_NULL) { } else if (token == XContentParser.Token.VALUE_STRING) { vFrom = parser.text(); // a String } else { vFrom = parser.numberValue(); // a Number } - } else if (currentFieldName.equals("to")) { + } else if (parseContext.parseFieldMatcher().match(currentFieldName, TO_FIELD)) { if (token == XContentParser.Token.VALUE_NULL) { } else if (token == 
XContentParser.Token.VALUE_STRING) { vTo = parser.text(); // a String } else { vTo = parser.numberValue(); // a Number } - } else if ("include_lower".equals(currentFieldName) || "includeLower".equals(currentFieldName)) { + } else if (parseContext.parseFieldMatcher().match(currentFieldName, INCLUDE_LOWER_FIELD)) { includeLower = parser.booleanValue(); - } else if ("include_upper".equals(currentFieldName) || "includeUpper".equals(currentFieldName)) { + } else if (parseContext.parseFieldMatcher().match(currentFieldName, INCLUDE_UPPER_FIELD)) { includeUpper = parser.booleanValue(); - } else if ("gt".equals(currentFieldName)) { + } else if (parseContext.parseFieldMatcher().match(currentFieldName, GT_FIELD)) { if (token == XContentParser.Token.VALUE_NULL) { } else if (token == XContentParser.Token.VALUE_STRING) { vFrom = parser.text(); // a String @@ -115,7 +133,7 @@ public class GeoDistanceRangeQueryParser implements QueryParser { vFrom = parser.numberValue(); // a Number } includeLower = false; - } else if ("gte".equals(currentFieldName) || "ge".equals(currentFieldName)) { + } else if (parseContext.parseFieldMatcher().match(currentFieldName, GTE_FIELD)) { if (token == XContentParser.Token.VALUE_NULL) { } else if (token == XContentParser.Token.VALUE_STRING) { vFrom = parser.text(); // a String @@ -123,7 +141,7 @@ public class GeoDistanceRangeQueryParser implements QueryParser { vFrom = parser.numberValue(); // a Number } includeLower = true; - } else if ("lt".equals(currentFieldName)) { + } else if (parseContext.parseFieldMatcher().match(currentFieldName, LT_FIELD)) { if (token == XContentParser.Token.VALUE_NULL) { } else if (token == XContentParser.Token.VALUE_STRING) { vTo = parser.text(); // a String @@ -131,7 +149,7 @@ public class GeoDistanceRangeQueryParser implements QueryParser { vTo = parser.numberValue(); // a Number } includeUpper = false; - } else if ("lte".equals(currentFieldName) || "le".equals(currentFieldName)) { + } else if 
(parseContext.parseFieldMatcher().match(currentFieldName, LTE_FIELD)) { if (token == XContentParser.Token.VALUE_NULL) { } else if (token == XContentParser.Token.VALUE_STRING) { vTo = parser.text(); // a String @@ -139,84 +157,98 @@ public class GeoDistanceRangeQueryParser implements QueryParser { vTo = parser.numberValue(); // a Number } includeUpper = true; - } else if (currentFieldName.equals("unit")) { + } else if (parseContext.parseFieldMatcher().match(currentFieldName, UNIT_FIELD)) { unit = DistanceUnit.fromString(parser.text()); - } else if (currentFieldName.equals("distance_type") || currentFieldName.equals("distanceType")) { + } else if (parseContext.parseFieldMatcher().match(currentFieldName, DISTANCE_TYPE_FIELD)) { geoDistance = GeoDistance.fromString(parser.text()); } else if (currentFieldName.endsWith(GeoPointFieldMapper.Names.LAT_SUFFIX)) { + if (point == null) { + point = new GeoPoint(); + } point.resetLat(parser.doubleValue()); fieldName = currentFieldName.substring(0, currentFieldName.length() - GeoPointFieldMapper.Names.LAT_SUFFIX.length()); } else if (currentFieldName.endsWith(GeoPointFieldMapper.Names.LON_SUFFIX)) { + if (point == null) { + point = new GeoPoint(); + } point.resetLon(parser.doubleValue()); fieldName = currentFieldName.substring(0, currentFieldName.length() - GeoPointFieldMapper.Names.LON_SUFFIX.length()); } else if (currentFieldName.endsWith(GeoPointFieldMapper.Names.GEOHASH_SUFFIX)) { - point.resetFromGeoHash(parser.text()); + point = GeoPoint.fromGeohash(parser.text()); fieldName = currentFieldName.substring(0, currentFieldName.length() - GeoPointFieldMapper.Names.GEOHASH_SUFFIX.length()); - } else if ("_name".equals(currentFieldName)) { + } else if (parseContext.parseFieldMatcher().match(currentFieldName, NAME_FIELD)) { queryName = parser.text(); - } else if ("optimize_bbox".equals(currentFieldName) || "optimizeBbox".equals(currentFieldName)) { + } else if (parseContext.parseFieldMatcher().match(currentFieldName, BOOST_FIELD)) 
{ + boost = parser.floatValue(); + } else if (parseContext.parseFieldMatcher().match(currentFieldName, OPTIMIZE_BBOX_FIELD)) { optimizeBbox = parser.textOrNull(); - } else if ("coerce".equals(currentFieldName) || (indexCreatedBeforeV2_0 && "normalize".equals(currentFieldName))) { + } else if (parseContext.parseFieldMatcher().match(currentFieldName, COERCE_FIELD)) { coerce = parser.booleanValue(); - if (coerce == true) { - ignoreMalformed = true; - } - } else if ("ignore_malformed".equals(currentFieldName) && coerce == false) { + } else if (parseContext.parseFieldMatcher().match(currentFieldName, IGNORE_MALFORMED_FIELD)) { ignoreMalformed = parser.booleanValue(); + } else if (parseContext.parseFieldMatcher().match(currentFieldName, VALIDATION_METHOD)) { + validationMethod = GeoValidationMethod.fromString(parser.text()); } else { + if (point == null) { + point = new GeoPoint(); + } point.resetFromString(parser.text()); fieldName = currentFieldName; } } } - // validation was not available prior to 2.x, so to support bwc percolation queries we only ignore_malformed on 2.x created indexes - if (!indexCreatedBeforeV2_0 && !ignoreMalformed) { - if (point.lat() > 90.0 || point.lat() < -90.0) { - throw new ParsingException(parseContext, "illegal latitude value [{}] for [{}]", point.lat(), NAME); - } - if (point.lon() > 180.0 || point.lon() < -180) { - throw new ParsingException(parseContext, "illegal longitude value [{}] for [{}]", point.lon(), NAME); - } + GeoDistanceRangeQueryBuilder queryBuilder = new GeoDistanceRangeQueryBuilder(fieldName, point); + if (boost != null) { + queryBuilder.boost(boost); } - if (coerce) { - GeoUtils.normalizePoint(point, coerce, coerce); + if (queryName != null) { + queryBuilder.queryName(queryName); } - Double from = null; - Double to = null; if (vFrom != null) { if (vFrom instanceof Number) { - from = unit.toMeters(((Number) vFrom).doubleValue()); + queryBuilder.from((Number) vFrom); } else { - from = DistanceUnit.parse((String) vFrom, 
unit, DistanceUnit.DEFAULT); + queryBuilder.from((String) vFrom); } - from = geoDistance.normalize(from, DistanceUnit.DEFAULT); } + if (vTo != null) { if (vTo instanceof Number) { - to = unit.toMeters(((Number) vTo).doubleValue()); + queryBuilder.to((Number) vTo); } else { - to = DistanceUnit.parse((String) vTo, unit, DistanceUnit.DEFAULT); + queryBuilder.to((String) vTo); } - to = geoDistance.normalize(to, DistanceUnit.DEFAULT); } - MappedFieldType fieldType = parseContext.fieldMapper(fieldName); - if (fieldType == null) { - throw new ParsingException(parseContext, "failed to find geo_point field [" + fieldName + "]"); + if (includeUpper != null) { + queryBuilder.includeUpper(includeUpper); } - if (!(fieldType instanceof GeoPointFieldMapper.GeoPointFieldType)) { - throw new ParsingException(parseContext, "field [" + fieldName + "] is not a geo_point field"); - } - GeoPointFieldMapper.GeoPointFieldType geoFieldType = ((GeoPointFieldMapper.GeoPointFieldType) fieldType); - IndexGeoPointFieldData indexFieldData = parseContext.getForField(fieldType); - Query query = new GeoDistanceRangeQuery(point, from, to, includeLower, includeUpper, geoDistance, geoFieldType, indexFieldData, optimizeBbox); - if (queryName != null) { - parseContext.addNamedQuery(queryName, query); + if (includeLower != null) { + queryBuilder.includeLower(includeLower); } - return query; + + if (unit != null) { + queryBuilder.unit(unit); + } + + if (geoDistance != null) { + queryBuilder.geoDistance(geoDistance); + } + + if (optimizeBbox != null) { + queryBuilder.optimizeBbox(optimizeBbox); + } + + if (validationMethod != null) { + // if validation method is set explicitly ignore deprecated coerce/ignore malformed fields if any + queryBuilder.setValidationMethod(validationMethod); + } else { + queryBuilder.setValidationMethod(GeoValidationMethod.infer(coerce, ignoreMalformed)); + } + return queryBuilder; } } diff --git a/core/src/main/java/org/elasticsearch/index/query/GeoExecType.java 
b/core/src/main/java/org/elasticsearch/index/query/GeoExecType.java new file mode 100644 index 00000000000..11b9941117b --- /dev/null +++ b/core/src/main/java/org/elasticsearch/index/query/GeoExecType.java @@ -0,0 +1,73 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.query; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; + +import java.io.IOException; + +/** Specifies how a geo query should be run. 
*/ +public enum GeoExecType implements Writeable { + + MEMORY(0), INDEXED(1); + + private final int ordinal; + + private static final GeoExecType PROTOTYPE = MEMORY; + + GeoExecType(int ordinal) { + this.ordinal = ordinal; + } + + @Override + public GeoExecType readFrom(StreamInput in) throws IOException { + int ord = in.readVInt(); + switch(ord) { + case(0): return MEMORY; + case(1): return INDEXED; + } + throw new ElasticsearchException("unknown serialized type [" + ord + "]"); + } + + public static GeoExecType readTypeFrom(StreamInput in) throws IOException { + return PROTOTYPE.readFrom(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeVInt(this.ordinal); + } + + public static GeoExecType fromString(String typeName) { + if (typeName == null) { + throw new IllegalArgumentException("cannot parse type from null string"); + } + + for (GeoExecType type : GeoExecType.values()) { + if (type.name().equalsIgnoreCase(typeName)) { + return type; + } + } + throw new IllegalArgumentException("no type can be parsed from ordinal " + typeName); + } +} \ No newline at end of file diff --git a/core/src/main/java/org/elasticsearch/index/query/GeoPolygonQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/GeoPolygonQueryBuilder.java index 2d486e05a12..35b7cbadb11 100644 --- a/core/src/main/java/org/elasticsearch/index/query/GeoPolygonQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/GeoPolygonQueryBuilder.java @@ -19,90 +19,178 @@ package org.elasticsearch.index.query; +import org.apache.lucene.search.Query; +import org.elasticsearch.Version; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.geo.GeoPoint; +import org.elasticsearch.common.geo.GeoUtils; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; +import 
org.elasticsearch.index.fielddata.IndexGeoPointFieldData; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper; +import org.elasticsearch.index.search.geo.GeoPolygonQuery; import java.io.IOException; import java.util.ArrayList; +import java.util.Arrays; import java.util.List; +import java.util.Objects; -public class GeoPolygonQueryBuilder extends QueryBuilder { +public class GeoPolygonQueryBuilder extends AbstractQueryBuilder { - public static final String POINTS = GeoPolygonQueryParser.POINTS; - - private final String name; + public static final String NAME = "geo_polygon"; - private final List shell = new ArrayList<>(); + private static final List PROTO_SHAPE = Arrays.asList(new GeoPoint[] { new GeoPoint(1.0, 1.0), new GeoPoint(1.0, 2.0), + new GeoPoint(2.0, 1.0) }); - private String queryName; + static final GeoPolygonQueryBuilder PROTOTYPE = new GeoPolygonQueryBuilder("field", PROTO_SHAPE); - private Boolean coerce; + private final String fieldName; - private Boolean ignoreMalformed; + private final List shell; - public GeoPolygonQueryBuilder(String name) { - this.name = name; + private GeoValidationMethod validationMethod = GeoValidationMethod.DEFAULT; + + public GeoPolygonQueryBuilder(String fieldName, List points) { + if (Strings.isEmpty(fieldName)) { + throw new IllegalArgumentException("fieldName must not be null"); + } + if (points == null || points.isEmpty()) { + throw new IllegalArgumentException("polygon must not be null or empty"); + } else { + GeoPoint start = points.get(0); + if (start.equals(points.get(points.size() - 1))) { + if (points.size() < 4) { + throw new IllegalArgumentException("too few points defined for geo_polygon query"); + } + } else { + if (points.size() < 3) { + throw new IllegalArgumentException("too few points defined for geo_polygon query"); + } + } + } + this.fieldName = fieldName; + this.shell = points; } - /** - * Adds a point with lat and lon - * - * @param 
lat The latitude - * @param lon The longitude - */ - public GeoPolygonQueryBuilder addPoint(double lat, double lon) { - return addPoint(new GeoPoint(lat, lon)); + public String fieldName() { + return fieldName; } - public GeoPolygonQueryBuilder addPoint(String geohash) { - return addPoint(GeoPoint.fromGeohash(geohash)); + public List points() { + return shell; } - public GeoPolygonQueryBuilder addPoint(GeoPoint point) { - shell.add(point); - return this; - } - - /** - * Sets the filter name for the filter that can be used when searching for matched_filters per hit. - */ - public GeoPolygonQueryBuilder queryName(String queryName) { - this.queryName = queryName; + /** Sets the validation method to use for geo coordinates. */ + public GeoPolygonQueryBuilder setValidationMethod(GeoValidationMethod method) { + this.validationMethod = method; return this; } - public GeoPolygonQueryBuilder coerce(boolean coerce) { - this.coerce = coerce; - return this; + /** Returns the validation method to use for geo coordinates. 
*/ + public GeoValidationMethod getValidationMethod() { + return this.validationMethod; } - public GeoPolygonQueryBuilder ignoreMalformed(boolean ignoreMalformed) { - this.ignoreMalformed = ignoreMalformed; - return this; + @Override + protected Query doToQuery(QueryShardContext context) throws IOException { + + if (!shell.get(shell.size() - 1).equals(shell.get(0))) { + shell.add(shell.get(0)); + } + + final boolean indexCreatedBeforeV2_0 = context.indexVersionCreated().before(Version.V_2_0_0); + // validation was not available prior to 2.x, so to support bwc + // percolation queries we only ignore_malformed on 2.x created indexes + if (!indexCreatedBeforeV2_0 && !GeoValidationMethod.isIgnoreMalformed(validationMethod)) { + for (GeoPoint point : shell) { + if (!GeoUtils.isValidLatitude(point.lat())) { + throw new QueryShardException(context, "illegal latitude value [{}] for [{}]", point.lat(), + GeoPolygonQueryBuilder.NAME); + } + if (!GeoUtils.isValidLongitude(point.lon())) { + throw new QueryShardException(context, "illegal longitude value [{}] for [{}]", point.lon(), + GeoPolygonQueryBuilder.NAME); + } + } + } + + if (GeoValidationMethod.isCoerce(validationMethod)) { + for (GeoPoint point : shell) { + GeoUtils.normalizePoint(point, true, true); + } + } + + MappedFieldType fieldType = context.fieldMapper(fieldName); + if (fieldType == null) { + throw new QueryShardException(context, "failed to find geo_point field [" + fieldName + "]"); + } + if (!(fieldType instanceof GeoPointFieldMapper.GeoPointFieldType)) { + throw new QueryShardException(context, "field [" + fieldName + "] is not a geo_point field"); + } + + IndexGeoPointFieldData indexFieldData = context.getForField(fieldType); + return new GeoPolygonQuery(indexFieldData, shell.toArray(new GeoPoint[shell.size()])); } @Override protected void doXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(GeoPolygonQueryParser.NAME); + builder.startObject(NAME); - 
builder.startObject(name); - builder.startArray(POINTS); + builder.startObject(fieldName); + builder.startArray(GeoPolygonQueryParser.POINTS_FIELD.getPreferredName()); for (GeoPoint point : shell) { builder.startArray().value(point.lon()).value(point.lat()).endArray(); } builder.endArray(); builder.endObject(); - if (queryName != null) { - builder.field("_name", queryName); - } - if (coerce != null) { - builder.field("coerce", coerce); - } - if (ignoreMalformed != null) { - builder.field("ignore_malformed", ignoreMalformed); - } + builder.field(GeoPolygonQueryParser.COERCE_FIELD.getPreferredName(), GeoValidationMethod.isCoerce(validationMethod)); + builder.field(GeoPolygonQueryParser.IGNORE_MALFORMED_FIELD.getPreferredName(), GeoValidationMethod.isIgnoreMalformed(validationMethod)); + printBoostAndQueryName(builder); builder.endObject(); } + + @Override + protected GeoPolygonQueryBuilder doReadFrom(StreamInput in) throws IOException { + String fieldName = in.readString(); + List shell = new ArrayList<>(); + int size = in.readVInt(); + for (int i = 0; i < size; i++) { + shell.add(GeoPoint.readGeoPointFrom(in)); + } + GeoPolygonQueryBuilder builder = new GeoPolygonQueryBuilder(fieldName, shell); + builder.validationMethod = GeoValidationMethod.readGeoValidationMethodFrom(in); + return builder; + } + + @Override + protected void doWriteTo(StreamOutput out) throws IOException { + out.writeString(fieldName); + out.writeVInt(shell.size()); + for (GeoPoint point : shell) { + point.writeTo(out); + } + validationMethod.writeTo(out); + } + + @Override + protected boolean doEquals(GeoPolygonQueryBuilder other) { + return Objects.equals(validationMethod, other.validationMethod) + && Objects.equals(fieldName, other.fieldName) + && Objects.equals(shell, other.shell); + } + + @Override + protected int doHashCode() { + return Objects.hash(validationMethod, fieldName, shell); + } + + @Override + public String getWriteableName() { + return NAME; + } } diff --git 
a/core/src/main/java/org/elasticsearch/index/query/GeoPolygonQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/GeoPolygonQueryParser.java index 53903227740..3298a0fdaef 100644 --- a/core/src/main/java/org/elasticsearch/index/query/GeoPolygonQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/GeoPolygonQueryParser.java @@ -19,18 +19,12 @@ package org.elasticsearch.index.query; -import org.apache.lucene.search.Query; -import org.elasticsearch.Version; +import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.GeoUtils; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser.Token; -import org.elasticsearch.index.fielddata.IndexGeoPointFieldData; -import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper; -import org.elasticsearch.index.search.geo.GeoPolygonQuery; import java.io.IOException; import java.util.ArrayList; @@ -48,31 +42,30 @@ import java.util.List; * } * */ -public class GeoPolygonQueryParser implements QueryParser { +public class GeoPolygonQueryParser implements QueryParser { - public static final String NAME = "geo_polygon"; - public static final String POINTS = "points"; - - @Inject - public GeoPolygonQueryParser() { - } + public static final ParseField COERCE_FIELD = new ParseField("coerce", "normalize"); + public static final ParseField IGNORE_MALFORMED_FIELD = new ParseField("ignore_malformed"); + public static final ParseField VALIDATION_METHOD = new ParseField("validation_method"); + public static final ParseField POINTS_FIELD = new ParseField("points"); @Override public String[] names() { - return new String[]{NAME, "geoPolygon"}; + return new String[]{GeoPolygonQueryBuilder.NAME, "geoPolygon"}; } @Override - public Query 
parse(QueryParseContext parseContext) throws IOException, ParsingException { + public GeoPolygonQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException { XContentParser parser = parseContext.parser(); String fieldName = null; - List shell = new ArrayList<>(); + List shell = null; - final boolean indexCreatedBeforeV2_0 = parseContext.indexVersionCreated().before(Version.V_2_0_0); - boolean coerce = false; - boolean ignoreMalformed = false; + Float boost = null; + boolean coerce = GeoValidationMethod.DEFAULT_LENIENT_PARSING; + boolean ignoreMalformed = GeoValidationMethod.DEFAULT_LENIENT_PARSING; + GeoValidationMethod validationMethod = null; String queryName = null; String currentFieldName = null; XContentParser.Token token; @@ -89,86 +82,60 @@ public class GeoPolygonQueryParser implements QueryParser { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); } else if (token == XContentParser.Token.START_ARRAY) { - if (POINTS.equals(currentFieldName)) { + if (parseContext.parseFieldMatcher().match(currentFieldName, POINTS_FIELD)) { + shell = new ArrayList(); while ((token = parser.nextToken()) != Token.END_ARRAY) { shell.add(GeoUtils.parseGeoPoint(parser)); } - if (!shell.get(shell.size()-1).equals(shell.get(0))) { - shell.add(shell.get(0)); - } } else { - throw new ParsingException(parseContext, "[geo_polygon] query does not support [" + currentFieldName + throw new ParsingException(parser.getTokenLocation(), "[geo_polygon] query does not support [" + currentFieldName + "]"); } } else { - throw new ParsingException(parseContext, "[geo_polygon] query does not support token type [" + token.name() + throw new ParsingException(parser.getTokenLocation(), "[geo_polygon] query does not support token type [" + token.name() + "] under [" + currentFieldName + "]"); } } } else if (token.isValue()) { if ("_name".equals(currentFieldName)) { queryName = parser.text(); - } else if ("coerce".equals(currentFieldName) || 
(indexCreatedBeforeV2_0 && "normalize".equals(currentFieldName))) { + } else if ("boost".equals(currentFieldName)) { + boost = parser.floatValue(); + } else if (parseContext.parseFieldMatcher().match(currentFieldName, COERCE_FIELD)) { coerce = parser.booleanValue(); if (coerce == true) { ignoreMalformed = true; } - } else if ("ignore_malformed".equals(currentFieldName) && coerce == false) { + } else if (parseContext.parseFieldMatcher().match(currentFieldName, IGNORE_MALFORMED_FIELD)) { ignoreMalformed = parser.booleanValue(); + } else if (parseContext.parseFieldMatcher().match(currentFieldName, VALIDATION_METHOD)) { + validationMethod = GeoValidationMethod.fromString(parser.text()); } else { - throw new ParsingException(parseContext, "[geo_polygon] query does not support [" + currentFieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), "[geo_polygon] query does not support [" + currentFieldName + "]"); } } else { - throw new ParsingException(parseContext, "[geo_polygon] unexpected token type [" + token.name() + "]"); + throw new ParsingException(parser.getTokenLocation(), "[geo_polygon] unexpected token type [" + token.name() + "]"); } } - - if (shell.isEmpty()) { - throw new ParsingException(parseContext, "no points defined for geo_polygon query"); + GeoPolygonQueryBuilder builder = new GeoPolygonQueryBuilder(fieldName, shell); + if (validationMethod != null) { + // if GeoValidationMethod was explicitly set ignore deprecated coerce and ignoreMalformed settings + builder.setValidationMethod(validationMethod); } else { - if (shell.size() < 3) { - throw new ParsingException(parseContext, "too few points defined for geo_polygon query"); - } - GeoPoint start = shell.get(0); - if (!start.equals(shell.get(shell.size() - 1))) { - shell.add(start); - } - if (shell.size() < 4) { - throw new ParsingException(parseContext, "too few points defined for geo_polygon query"); - } + builder.setValidationMethod(GeoValidationMethod.infer(coerce, ignoreMalformed)); 
} - // validation was not available prior to 2.x, so to support bwc percolation queries we only ignore_malformed on 2.x created indexes - if (!indexCreatedBeforeV2_0 && !ignoreMalformed) { - for (GeoPoint point : shell) { - if (point.lat() > 90.0 || point.lat() < -90.0) { - throw new ParsingException(parseContext, "illegal latitude value [{}] for [{}]", point.lat(), NAME); - } - if (point.lon() > 180.0 || point.lon() < -180) { - throw new ParsingException(parseContext, "illegal longitude value [{}] for [{}]", point.lon(), NAME); - } - } - } - - if (coerce) { - for (GeoPoint point : shell) { - GeoUtils.normalizePoint(point, coerce, coerce); - } - } - - MappedFieldType fieldType = parseContext.fieldMapper(fieldName); - if (fieldType == null) { - throw new ParsingException(parseContext, "failed to find geo_point field [" + fieldName + "]"); - } - if (!(fieldType instanceof GeoPointFieldMapper.GeoPointFieldType)) { - throw new ParsingException(parseContext, "field [" + fieldName + "] is not a geo_point field"); - } - - IndexGeoPointFieldData indexFieldData = parseContext.getForField(fieldType); - Query query = new GeoPolygonQuery(indexFieldData, shell.toArray(new GeoPoint[shell.size()])); if (queryName != null) { - parseContext.addNamedQuery(queryName, query); + builder.queryName(queryName); } - return query; + if (boost != null) { + builder.boost(boost); + } + return builder; + } + + @Override + public GeoPolygonQueryBuilder getBuilderPrototype() { + return GeoPolygonQueryBuilder.PROTOTYPE; } } diff --git a/core/src/main/java/org/elasticsearch/index/query/GeoShapeQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/GeoShapeQueryBuilder.java index 3887874ee94..31bc889cb91 100644 --- a/core/src/main/java/org/elasticsearch/index/query/GeoShapeQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/GeoShapeQueryBuilder.java @@ -19,100 +19,182 @@ package org.elasticsearch.index.query; +import org.apache.lucene.search.BooleanClause; +import 
org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.ConstantScoreQuery; +import org.apache.lucene.search.Filter; +import org.apache.lucene.search.Query; +import org.apache.lucene.spatial.prefix.PrefixTreeStrategy; +import org.apache.lucene.spatial.prefix.RecursivePrefixTreeStrategy; +import org.apache.lucene.spatial.query.SpatialArgs; +import org.apache.lucene.spatial.query.SpatialOperation; +import org.elasticsearch.action.get.GetRequest; +import org.elasticsearch.action.get.GetResponse; +import org.elasticsearch.client.Client; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.geo.ShapeRelation; +import org.elasticsearch.common.geo.ShapesAvailability; import org.elasticsearch.common.geo.SpatialStrategy; import org.elasticsearch.common.geo.builders.ShapeBuilder; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.geo.GeoShapeFieldMapper; +import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; +import java.util.Objects; /** - * {@link QueryBuilder} that builds a GeoShape Filter + * {@link QueryBuilder} that builds a GeoShape Query */ -public class GeoShapeQueryBuilder extends QueryBuilder implements BoostableQueryBuilder { +public class GeoShapeQueryBuilder extends AbstractQueryBuilder { - private final String name; + public static final String NAME = "geo_shape"; + public static final String DEFAULT_SHAPE_INDEX_NAME = "shapes"; + public static final 
String DEFAULT_SHAPE_FIELD_NAME = "shape"; + public static final ShapeRelation DEFAULT_SHAPE_RELATION = ShapeRelation.INTERSECTS; - private final ShapeBuilder shape; + static final GeoShapeQueryBuilder PROTOTYPE = new GeoShapeQueryBuilder("field", new BytesArray(new byte[1])); + + private final String fieldName; + + // TODO make the ShapeBuilder and subclasses Writable and implement hashCode + // and Equals so ShapeBuilder can be used here + private BytesReference shapeBytes; private SpatialStrategy strategy = null; - private String queryName; - private final String indexedShapeId; private final String indexedShapeType; - private String indexedShapeIndex; - private String indexedShapePath; + private String indexedShapeIndex = DEFAULT_SHAPE_INDEX_NAME; + private String indexedShapePath = DEFAULT_SHAPE_FIELD_NAME; - private ShapeRelation relation = null; + private ShapeRelation relation = DEFAULT_SHAPE_RELATION; - private float boost = -1; - /** - * Creates a new GeoShapeQueryBuilder whose Filter will be against the - * given field name using the given Shape + * Creates a new GeoShapeQueryBuilder whose Query will be against the given + * field name using the given Shape * - * @param name Name of the field that will be filtered - * @param shape Shape used in the filter + * @param fieldName + * Name of the field that will be queried + * @param shape + * Shape used in the Query */ - public GeoShapeQueryBuilder(String name, ShapeBuilder shape) { - this(name, shape, null, null, null); + public GeoShapeQueryBuilder(String fieldName, ShapeBuilder shape) throws IOException { + this(fieldName, shape, null, null); } /** - * Creates a new GeoShapeQueryBuilder whose Filter will be against the - * given field name using the given Shape + * Creates a new GeoShapeQueryBuilder whose Query will be against the given + * field name and will use the Shape found with the given ID in the given + * type * - * @param name Name of the field that will be filtered - * @param relation {@link 
ShapeRelation} of query and indexed shape - * @param shape Shape used in the filter + * @param fieldName + * Name of the field that will be filtered + * @param indexedShapeId + * ID of the indexed Shape that will be used in the Query + * @param indexedShapeType + * Index type of the indexed Shapes */ - public GeoShapeQueryBuilder(String name, ShapeBuilder shape, ShapeRelation relation) { - this(name, shape, null, null, relation); + public GeoShapeQueryBuilder(String fieldName, String indexedShapeId, String indexedShapeType) { + this(fieldName, (BytesReference) null, indexedShapeId, indexedShapeType); } - /** - * Creates a new GeoShapeQueryBuilder whose Filter will be against the given field name - * and will use the Shape found with the given ID in the given type - * - * @param name Name of the field that will be filtered - * @param indexedShapeId ID of the indexed Shape that will be used in the Filter - * @param indexedShapeType Index type of the indexed Shapes - */ - public GeoShapeQueryBuilder(String name, String indexedShapeId, String indexedShapeType, ShapeRelation relation) { - this(name, null, indexedShapeId, indexedShapeType, relation); + GeoShapeQueryBuilder(String fieldName, BytesReference shapeBytes) { + this(fieldName, shapeBytes, null, null); } - private GeoShapeQueryBuilder(String name, ShapeBuilder shape, String indexedShapeId, String indexedShapeType, ShapeRelation relation) { - this.name = name; - this.shape = shape; + private GeoShapeQueryBuilder(String fieldName, ShapeBuilder shape, String indexedShapeId, String indexedShapeType) throws IOException { + this(fieldName, new BytesArray(new byte[1]), indexedShapeId, indexedShapeType); + if (shape != null) { + XContentBuilder builder = XContentFactory.jsonBuilder(); + shape.toXContent(builder, EMPTY_PARAMS); + this.shapeBytes = shape.buildAsBytes(XContentType.JSON); + if (this.shapeBytes.length() == 0) { + throw new IllegalArgumentException("shape must not be empty"); + } + } else { + throw new 
IllegalArgumentException("shape must not be null"); + } + } + + private GeoShapeQueryBuilder(String fieldName, BytesReference shapeBytes, String indexedShapeId, String indexedShapeType) { + if (fieldName == null) { + throw new IllegalArgumentException("fieldName is required"); + } + if ((shapeBytes == null || shapeBytes.length() == 0) && indexedShapeId == null) { + throw new IllegalArgumentException("either shapeBytes or indexedShapeId and indexedShapeType are required"); + } + if (indexedShapeId != null && indexedShapeType == null) { + throw new IllegalArgumentException("indexedShapeType is required if indexedShapeId is specified"); + } + this.fieldName = fieldName; + this.shapeBytes = shapeBytes; this.indexedShapeId = indexedShapeId; - this.relation = relation; this.indexedShapeType = indexedShapeType; } /** - * Sets the name of the filter + * @return the name of the field that will be queried + */ + public String fieldName() { + return fieldName; + } + + /** + * @return the JSON bytes for the shape used in the Query + */ + public BytesReference shapeBytes() { + return shapeBytes; + } + + /** + * @return the ID of the indexed Shape that will be used in the Query + */ + public String indexedShapeId() { + return indexedShapeId; + } + + /** + * @return the document type of the indexed Shape that will be used in the + * Query + */ + public String indexedShapeType() { + return indexedShapeType; + } + + /** + * Defines which spatial strategy will be used for building the geo shape + * Query. When not set, the strategy that will be used will be the one that + * is associated with the geo shape field in the mappings. 
* - * @param queryName Name of the filter + * @param strategy + * The spatial strategy to use for building the geo shape Query * @return this */ - public GeoShapeQueryBuilder queryName(String queryName) { - this.queryName = queryName; + public GeoShapeQueryBuilder strategy(SpatialStrategy strategy) { + if (strategy != null && strategy == SpatialStrategy.TERM && relation != ShapeRelation.INTERSECTS) { + throw new IllegalArgumentException("strategy [" + strategy.getStrategyName() + "] only supports relation [" + + ShapeRelation.INTERSECTS.getRelationName() + "] found relation [" + relation.getRelationName() + "]"); + } + this.strategy = strategy; return this; } /** - * Defines which spatial strategy will be used for building the geo shape filter. When not set, the strategy that - * will be used will be the one that is associated with the geo shape field in the mappings. - * - * @param strategy The spatial strategy to use for building the geo shape filter - * @return this + * @return The spatial strategy to use for building the geo shape Query */ - public GeoShapeQueryBuilder strategy(SpatialStrategy strategy) { - this.strategy = strategy; - return this; + public SpatialStrategy strategy() { + return strategy; } /** @@ -126,6 +208,14 @@ public class GeoShapeQueryBuilder extends QueryBuilder implements BoostableQuery return this; } + /** + * @return the index name for the indexed Shape that will be used in the + * Query + */ + public String indexedShapeIndex() { + return indexedShapeIndex; + } + /** * Sets the path of the field in the indexed Shape document that has the Shape itself * @@ -137,6 +227,13 @@ public class GeoShapeQueryBuilder extends QueryBuilder implements BoostableQuery return this; } + /** + * @return the path of the indexed Shape that will be used in the Query + */ + public String indexedShapePath() { + return indexedShapePath; + } + /** * Sets the relation of query shape and indexed shape. 
* @@ -144,55 +241,235 @@ public class GeoShapeQueryBuilder extends QueryBuilder implements BoostableQuery * @return this */ public GeoShapeQueryBuilder relation(ShapeRelation relation) { + if (relation == null) { + throw new IllegalArgumentException("No Shape Relation defined"); + } + if (strategy != null && strategy == SpatialStrategy.TERM && relation != ShapeRelation.INTERSECTS) { + throw new IllegalArgumentException("current strategy [" + strategy.getStrategyName() + "] only supports relation [" + + ShapeRelation.INTERSECTS.getRelationName() + "] found relation [" + relation.getRelationName() + "]"); + } this.relation = relation; return this; } + /** + * @return the relation of query shape and indexed shape to use in the Query + */ + public ShapeRelation relation() { + return relation; + } + @Override - public GeoShapeQueryBuilder boost(float boost) { - this.boost = boost; - return this; + protected Query doToQuery(QueryShardContext context) throws IOException { + ShapeBuilder shape; + if (shapeBytes == null) { + GetRequest getRequest = new GetRequest(indexedShapeIndex, indexedShapeType, indexedShapeId); + getRequest.copyContextAndHeadersFrom(SearchContext.current()); + shape = fetch(context.getClient(), getRequest, indexedShapePath); + } else { + XContentParser shapeParser = XContentHelper.createParser(shapeBytes); + shapeParser.nextToken(); + shape = ShapeBuilder.parse(shapeParser); + } + MappedFieldType fieldType = context.fieldMapper(fieldName); + if (fieldType == null) { + throw new QueryShardException(context, "Failed to find geo_shape field [" + fieldName + "]"); + } + + // TODO: This isn't the nicest way to check this + if (!(fieldType instanceof GeoShapeFieldMapper.GeoShapeFieldType)) { + throw new QueryShardException(context, "Field [" + fieldName + "] is not a geo_shape"); + } + + GeoShapeFieldMapper.GeoShapeFieldType shapeFieldType = (GeoShapeFieldMapper.GeoShapeFieldType) fieldType; + + PrefixTreeStrategy strategy = shapeFieldType.defaultStrategy(); 
+ if (this.strategy != null) { + strategy = shapeFieldType.resolveStrategy(this.strategy); + } + Query query; + if (strategy instanceof RecursivePrefixTreeStrategy && relation == ShapeRelation.DISJOINT) { + // this strategy doesn't support disjoint anymore: but it did + // before, including creating lucene fieldcache (!) + // in this case, execute disjoint as exists && !intersects + BooleanQuery.Builder bool = new BooleanQuery.Builder(); + Query exists = ExistsQueryBuilder.newFilter(context, fieldName); + Filter intersects = strategy.makeFilter(getArgs(shape, ShapeRelation.INTERSECTS)); + bool.add(exists, BooleanClause.Occur.MUST); + bool.add(intersects, BooleanClause.Occur.MUST_NOT); + query = new ConstantScoreQuery(bool.build()); + } else { + query = strategy.makeQuery(getArgs(shape, relation)); + } + return query; + } + + /** + * Fetches the Shape with the given ID in the given type and index. + * + * @param getRequest + * GetRequest containing index, type and id + * @param path + * Name or path of the field in the Shape Document where the + * Shape itself is located + * @return Shape with the given ID + * @throws IOException + * Can be thrown while parsing the Shape Document and extracting + * the Shape + */ + private ShapeBuilder fetch(Client client, GetRequest getRequest, String path) throws IOException { + if (ShapesAvailability.JTS_AVAILABLE == false) { + throw new IllegalStateException("JTS not available"); + } + getRequest.preference("_local"); + getRequest.operationThreaded(false); + GetResponse response = client.get(getRequest).actionGet(); + if (!response.isExists()) { + throw new IllegalArgumentException("Shape with ID [" + getRequest.id() + "] in type [" + getRequest.type() + "] not found"); + } + + String[] pathElements = Strings.splitStringToArray(path, '.'); + int currentPathSlot = 0; + + XContentParser parser = null; + try { + parser = XContentHelper.createParser(response.getSourceAsBytesRef()); + XContentParser.Token currentToken; + while 
((currentToken = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (currentToken == XContentParser.Token.FIELD_NAME) { + if (pathElements[currentPathSlot].equals(parser.currentName())) { + parser.nextToken(); + if (++currentPathSlot == pathElements.length) { + return ShapeBuilder.parse(parser); + } + } else { + parser.nextToken(); + parser.skipChildren(); + } + } + } + throw new IllegalStateException("Shape with name [" + getRequest.id() + "] found but missing " + path + " field"); + } finally { + if (parser != null) { + parser.close(); + } + } + } + + public static SpatialArgs getArgs(ShapeBuilder shape, ShapeRelation relation) { + switch (relation) { + case DISJOINT: + return new SpatialArgs(SpatialOperation.IsDisjointTo, shape.build()); + case INTERSECTS: + return new SpatialArgs(SpatialOperation.Intersects, shape.build()); + case WITHIN: + return new SpatialArgs(SpatialOperation.IsWithin, shape.build()); + default: + throw new IllegalArgumentException("invalid relation [" + relation + "]"); + } } @Override protected void doXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(GeoShapeQueryParser.NAME); + builder.startObject(NAME); - builder.startObject(name); + builder.startObject(fieldName); if (strategy != null) { - builder.field("strategy", strategy.getStrategyName()); + builder.field(GeoShapeQueryParser.STRATEGY_FIELD.getPreferredName(), strategy.getStrategyName()); } - if (shape != null) { - builder.field("shape", shape); + if (shapeBytes != null) { + builder.field(GeoShapeQueryParser.SHAPE_FIELD.getPreferredName()); + XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(shapeBytes); + parser.nextToken(); + builder.copyCurrentStructure(parser); } else { - builder.startObject("indexed_shape") - .field("id", indexedShapeId) - .field("type", indexedShapeType); + builder.startObject(GeoShapeQueryParser.INDEXED_SHAPE_FIELD.getPreferredName()) + 
.field(GeoShapeQueryParser.SHAPE_ID_FIELD.getPreferredName(), indexedShapeId) + .field(GeoShapeQueryParser.SHAPE_TYPE_FIELD.getPreferredName(), indexedShapeType); if (indexedShapeIndex != null) { - builder.field("index", indexedShapeIndex); + builder.field(GeoShapeQueryParser.SHAPE_INDEX_FIELD.getPreferredName(), indexedShapeIndex); } if (indexedShapePath != null) { - builder.field("path", indexedShapePath); + builder.field(GeoShapeQueryParser.SHAPE_PATH_FIELD.getPreferredName(), indexedShapePath); } builder.endObject(); } if(relation != null) { - builder.field("relation", relation.getRelationName()); + builder.field(GeoShapeQueryParser.RELATION_FIELD.getPreferredName(), relation.getRelationName()); } builder.endObject(); - if (boost != -1) { - builder.field("boost", boost); - } - - if (name != null) { - builder.field("_name", queryName); - } + printBoostAndQueryName(builder); builder.endObject(); } + + @Override + protected GeoShapeQueryBuilder doReadFrom(StreamInput in) throws IOException { + String fieldName = in.readString(); + GeoShapeQueryBuilder builder; + if (in.readBoolean()) { + BytesReference shapeBytes = in.readBytesReference(); + builder = new GeoShapeQueryBuilder(fieldName, shapeBytes); + } else { + String indexedShapeId = in.readOptionalString(); + String indexedShapeType = in.readOptionalString(); + String indexedShapeIndex = in.readOptionalString(); + String indexedShapePath = in.readOptionalString(); + builder = new GeoShapeQueryBuilder(fieldName, indexedShapeId, indexedShapeType); + if (indexedShapeIndex != null) { + builder.indexedShapeIndex = indexedShapeIndex; + } + if (indexedShapePath != null) { + builder.indexedShapePath = indexedShapePath; + } + } + builder.relation = ShapeRelation.DISJOINT.readFrom(in); + builder.strategy = SpatialStrategy.RECURSIVE.readFrom(in); + return builder; + } + + @Override + protected void doWriteTo(StreamOutput out) throws IOException { + out.writeString(fieldName); + boolean hasShapeBytes = shapeBytes != null; 
+ out.writeBoolean(hasShapeBytes); + if (hasShapeBytes) { + out.writeBytesReference(shapeBytes); + } else { + out.writeOptionalString(indexedShapeId); + out.writeOptionalString(indexedShapeType); + out.writeOptionalString(indexedShapeIndex); + out.writeOptionalString(indexedShapePath); + } + relation.writeTo(out); + strategy.writeTo(out); + } + + @Override + protected boolean doEquals(GeoShapeQueryBuilder other) { + return Objects.equals(fieldName, other.fieldName) + && Objects.equals(indexedShapeId, other.indexedShapeId) + && Objects.equals(indexedShapeIndex, other.indexedShapeIndex) + && Objects.equals(indexedShapePath, other.indexedShapePath) + && Objects.equals(indexedShapeType, other.indexedShapeType) + && Objects.equals(relation, other.relation) + && Objects.equals(shapeBytes, other.shapeBytes) + && Objects.equals(strategy, other.strategy); + } + + @Override + protected int doHashCode() { + return Objects.hash(fieldName, indexedShapeId, indexedShapeIndex, + indexedShapePath, indexedShapeType, relation, shapeBytes, strategy); + } + + @Override + public String getWriteableName() { + return NAME; + } } diff --git a/core/src/main/java/org/elasticsearch/index/query/GeoShapeQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/GeoShapeQueryParser.java index 9a367edc483..e5198952c13 100644 --- a/core/src/main/java/org/elasticsearch/index/query/GeoShapeQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/GeoShapeQueryParser.java @@ -19,59 +19,51 @@ package org.elasticsearch.index.query; -import org.apache.lucene.search.*; -import org.apache.lucene.spatial.prefix.PrefixTreeStrategy; -import org.apache.lucene.spatial.prefix.RecursivePrefixTreeStrategy; -import org.apache.lucene.spatial.query.SpatialArgs; -import org.apache.lucene.spatial.query.SpatialOperation; -import org.elasticsearch.action.get.GetRequest; -import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.ParseField; import 
org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.geo.ShapeRelation; -import org.elasticsearch.common.geo.builders.ShapeBuilder; -import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.geo.SpatialStrategy; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.mapper.geo.GeoShapeFieldMapper; -import org.elasticsearch.index.search.shape.ShapeFetchService; -import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; -public class GeoShapeQueryParser implements QueryParser { +public class GeoShapeQueryParser implements QueryParser { - public static final String NAME = "geo_shape"; - - private ShapeFetchService fetchService; - - public static class DEFAULTS { - public static final String INDEX_NAME = "shapes"; - public static final String SHAPE_FIELD_NAME = "shape"; - } + public static final ParseField SHAPE_FIELD = new ParseField("shape"); + public static final ParseField STRATEGY_FIELD = new ParseField("strategy"); + public static final ParseField RELATION_FIELD = new ParseField("relation"); + public static final ParseField INDEXED_SHAPE_FIELD = new ParseField("indexed_shape"); + public static final ParseField SHAPE_ID_FIELD = new ParseField("id"); + public static final ParseField SHAPE_TYPE_FIELD = new ParseField("type"); + public static final ParseField SHAPE_INDEX_FIELD = new ParseField("index"); + public static final ParseField SHAPE_PATH_FIELD = new ParseField("path"); @Override public String[] names() { - return new String[]{NAME, Strings.toCamelCase(NAME)}; + return new String[]{GeoShapeQueryBuilder.NAME, Strings.toCamelCase(GeoShapeQueryBuilder.NAME)}; } @Override - public Query 
parse(QueryParseContext parseContext) throws IOException, ParsingException { + public GeoShapeQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException { XContentParser parser = parseContext.parser(); String fieldName = null; - ShapeRelation shapeRelation = ShapeRelation.INTERSECTS; - String strategyName = null; - ShapeBuilder shape = null; + ShapeRelation shapeRelation = null; + SpatialStrategy strategy = null; + BytesReference shape = null; String id = null; String type = null; - String index = DEFAULTS.INDEX_NAME; - String shapePath = DEFAULTS.SHAPE_FIELD_NAME; + String index = null; + String shapePath = null; XContentParser.Token token; String currentFieldName = null; - float boost = 1f; + float boost = AbstractQueryBuilder.DEFAULT_BOOST; String queryName = null; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { @@ -84,113 +76,78 @@ public class GeoShapeQueryParser implements QueryParser { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); token = parser.nextToken(); - if ("shape".equals(currentFieldName)) { - shape = ShapeBuilder.parse(parser); - } else if ("strategy".equals(currentFieldName)) { - strategyName = parser.text(); - } else if ("relation".equals(currentFieldName)) { + if (parseContext.parseFieldMatcher().match(currentFieldName, SHAPE_FIELD)) { + XContentBuilder builder = XContentFactory.contentBuilder(parser.contentType()).copyCurrentStructure(parser); + shape = builder.bytes(); + } else if (parseContext.parseFieldMatcher().match(currentFieldName, STRATEGY_FIELD)) { + String strategyName = parser.text(); + strategy = SpatialStrategy.fromString(strategyName); + if (strategy == null) { + throw new ParsingException(parser.getTokenLocation(), "Unknown strategy [" + strategyName + " ]"); + } + } else if (parseContext.parseFieldMatcher().match(currentFieldName, RELATION_FIELD)) { shapeRelation = ShapeRelation.getRelationByName(parser.text()); if (shapeRelation == null) { - 
throw new ParsingException(parseContext, "Unknown shape operation [" + parser.text() + " ]"); + throw new ParsingException(parser.getTokenLocation(), "Unknown shape operation [" + parser.text() + " ]"); } - } else if ("indexed_shape".equals(currentFieldName) || "indexedShape".equals(currentFieldName)) { + } else if (parseContext.parseFieldMatcher().match(currentFieldName, INDEXED_SHAPE_FIELD)) { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); } else if (token.isValue()) { - if ("id".equals(currentFieldName)) { + if (parseContext.parseFieldMatcher().match(currentFieldName, SHAPE_ID_FIELD)) { id = parser.text(); - } else if ("type".equals(currentFieldName)) { + } else if (parseContext.parseFieldMatcher().match(currentFieldName, SHAPE_TYPE_FIELD)) { type = parser.text(); - } else if ("index".equals(currentFieldName)) { + } else if (parseContext.parseFieldMatcher().match(currentFieldName, SHAPE_INDEX_FIELD)) { index = parser.text(); - } else if ("path".equals(currentFieldName)) { + } else if (parseContext.parseFieldMatcher().match(currentFieldName, SHAPE_PATH_FIELD)) { shapePath = parser.text(); } } } - if (id == null) { - throw new ParsingException(parseContext, "ID for indexed shape not provided"); - } else if (type == null) { - throw new ParsingException(parseContext, "Type for indexed shape not provided"); - } - GetRequest getRequest = new GetRequest(index, type, id); - getRequest.copyContextAndHeadersFrom(SearchContext.current()); - shape = fetchService.fetch(getRequest, shapePath); } else { - throw new ParsingException(parseContext, "[geo_shape] query does not support [" + currentFieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), "[geo_shape] query does not support [" + currentFieldName + "]"); } } } } else if (token.isValue()) { - if ("boost".equals(currentFieldName)) { + if 
(parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) { boost = parser.floatValue(); - } else if ("_name".equals(currentFieldName)) { + } else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) { queryName = parser.text(); } else { - throw new ParsingException(parseContext, "[geo_shape] query does not support [" + currentFieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), "[geo_shape] query does not support [" + currentFieldName + "]"); } } } - - if (shape == null) { - throw new ParsingException(parseContext, "No Shape defined"); - } else if (shapeRelation == null) { - throw new ParsingException(parseContext, "No Shape Relation defined"); - } - - MappedFieldType fieldType = parseContext.fieldMapper(fieldName); - if (fieldType == null) { - throw new ParsingException(parseContext, "Failed to find geo_shape field [" + fieldName + "]"); - } - - // TODO: This isn't the nicest way to check this - if (!(fieldType instanceof GeoShapeFieldMapper.GeoShapeFieldType)) { - throw new ParsingException(parseContext, "Field [" + fieldName + "] is not a geo_shape"); - } - - GeoShapeFieldMapper.GeoShapeFieldType shapeFieldType = (GeoShapeFieldMapper.GeoShapeFieldType) fieldType; - - PrefixTreeStrategy strategy = shapeFieldType.defaultStrategy(); - if (strategyName != null) { - strategy = shapeFieldType.resolveStrategy(strategyName); - } - Query query; - if (strategy instanceof RecursivePrefixTreeStrategy && shapeRelation == ShapeRelation.DISJOINT) { - // this strategy doesn't support disjoint anymore: but it did before, including creating lucene fieldcache (!) 
- // in this case, execute disjoint as exists && !intersects - BooleanQuery.Builder bool = new BooleanQuery.Builder(); - Query exists = ExistsQueryParser.newFilter(parseContext, fieldName, null); - Filter intersects = strategy.makeFilter(getArgs(shape, ShapeRelation.INTERSECTS)); - bool.add(exists, BooleanClause.Occur.MUST); - bool.add(intersects, BooleanClause.Occur.MUST_NOT); - query = new ConstantScoreQuery(bool.build()); + GeoShapeQueryBuilder builder; + if (shape != null) { + builder = new GeoShapeQueryBuilder(fieldName, shape); } else { - query = strategy.makeQuery(getArgs(shape, shapeRelation)); + builder = new GeoShapeQueryBuilder(fieldName, id, type); + } + if (index != null) { + builder.indexedShapeIndex(index); + } + if (shapePath != null) { + builder.indexedShapePath(shapePath); + } + if (shapeRelation != null) { + builder.relation(shapeRelation); + } + if (strategy != null) { + builder.strategy(strategy); } - query.setBoost(boost); if (queryName != null) { - parseContext.addNamedQuery(queryName, query); + builder.queryName(queryName); } - return query; + builder.boost(boost); + return builder; } - @Inject(optional = true) - public void setFetchService(@Nullable ShapeFetchService fetchService) { - this.fetchService = fetchService; - } - - public static SpatialArgs getArgs(ShapeBuilder shape, ShapeRelation relation) { - switch(relation) { - case DISJOINT: - return new SpatialArgs(SpatialOperation.IsDisjointTo, shape.build()); - case INTERSECTS: - return new SpatialArgs(SpatialOperation.Intersects, shape.build()); - case WITHIN: - return new SpatialArgs(SpatialOperation.IsWithin, shape.build()); - default: - throw new IllegalArgumentException(""); - - } + @Override + public GeoShapeQueryBuilder getBuilderPrototype() { + return GeoShapeQueryBuilder.PROTOTYPE; } } diff --git a/core/src/main/java/org/elasticsearch/index/query/GeoValidationMethod.java b/core/src/main/java/org/elasticsearch/index/query/GeoValidationMethod.java new file mode 100644 index 
00000000000..aa7bd083ee5 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/index/query/GeoValidationMethod.java @@ -0,0 +1,89 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.query; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.util.CollectionUtils; + +import java.io.IOException; + +/** + * This enum is used to determine how to deal with invalid geo coordinates in geo related + * queries: + * + * On STRICT validation invalid coordinates cause an exception to be thrown. + * On IGNORE_MALFORMED invalid coordinates are being accepted. + * On COERCE invalid coordinates are being corrected to the most likely valid coordinate. 
+ * */ +public enum GeoValidationMethod implements Writeable{ + COERCE, IGNORE_MALFORMED, STRICT; + + public static final GeoValidationMethod DEFAULT = STRICT; + public static final boolean DEFAULT_LENIENT_PARSING = (DEFAULT != STRICT); + private static final GeoValidationMethod PROTOTYPE = DEFAULT; + + @Override + public GeoValidationMethod readFrom(StreamInput in) throws IOException { + return GeoValidationMethod.values()[in.readVInt()]; + } + + public static GeoValidationMethod readGeoValidationMethodFrom(StreamInput in) throws IOException { + return PROTOTYPE.readFrom(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeVInt(this.ordinal()); + } + + public static GeoValidationMethod fromString(String op) { + for (GeoValidationMethod method : GeoValidationMethod.values()) { + if (method.name().equalsIgnoreCase(op)) { + return method; + } + } + throw new IllegalArgumentException("operator needs to be either " + CollectionUtils.arrayAsArrayList(GeoValidationMethod.values()) + + ", but not [" + op + "]"); + } + + /** Returns whether or not to skip bounding box validation. */ + public static boolean isIgnoreMalformed(GeoValidationMethod method) { + return (method == GeoValidationMethod.IGNORE_MALFORMED || method == GeoValidationMethod.COERCE); + } + + /** Returns whether or not to try and fix broken/wrapping bounding boxes. */ + public static boolean isCoerce(GeoValidationMethod method) { + return method == GeoValidationMethod.COERCE; + } + + /** Returns validation method corresponding to given coerce and ignoreMalformed values. 
*/ + public static GeoValidationMethod infer(boolean coerce, boolean ignoreMalformed) { + if (coerce) { + return GeoValidationMethod.COERCE; + } else if (ignoreMalformed) { + return GeoValidationMethod.IGNORE_MALFORMED; + } else { + return GeoValidationMethod.STRICT; + } + } + +} diff --git a/core/src/main/java/org/elasticsearch/index/query/GeohashCellQuery.java b/core/src/main/java/org/elasticsearch/index/query/GeohashCellQuery.java index 84d38578127..b779d802704 100644 --- a/core/src/main/java/org/elasticsearch/index/query/GeohashCellQuery.java +++ b/core/src/main/java/org/elasticsearch/index/query/GeohashCellQuery.java @@ -23,11 +23,13 @@ import org.apache.lucene.search.Query; import org.apache.lucene.util.XGeoHashUtils; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Strings; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.GeoUtils; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.unit.DistanceUnit; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -38,6 +40,7 @@ import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper; import java.io.IOException; import java.util.ArrayList; import java.util.List; +import java.util.Objects; /** * A geohash cell filter that filters {@link GeoPoint}s by their geohashes. 
Basically the a @@ -57,8 +60,9 @@ import java.util.List; public class GeohashCellQuery { public static final String NAME = "geohash_cell"; - public static final String NEIGHBORS = "neighbors"; - public static final String PRECISION = "precision"; + public static final ParseField NEIGHBORS_FIELD = new ParseField("neighbors"); + public static final ParseField PRECISION_FIELD = new ParseField("precision"); + public static final boolean DEFAULT_NEIGHBORS = false; /** * Create a new geohash filter for a given set of geohashes. In general this method @@ -70,7 +74,7 @@ public class GeohashCellQuery { * @param geohashes optional array of additional geohashes * @return a new GeoBoundinboxfilter */ - public static Query create(QueryParseContext context, GeoPointFieldMapper.GeoPointFieldType fieldType, String geohash, @Nullable List geohashes) { + public static Query create(QueryShardContext context, GeoPointFieldMapper.GeoPointFieldType fieldType, String geohash, @Nullable List geohashes) { MappedFieldType geoHashMapper = fieldType.geohashFieldType(); if (geoHashMapper == null) { throw new IllegalArgumentException("geohash filter needs geohash_prefix to be enabled"); @@ -89,23 +93,20 @@ public class GeohashCellQuery { * geohash to be set. the default for a neighbor filteing is * false. */ - public static class Builder extends QueryBuilder { + public static class Builder extends AbstractQueryBuilder { // we need to store the geohash rather than the corresponding point, // because a transformation from a geohash to a point an back to the // geohash will extend the accuracy of the hash to max precision // i.e. by filing up with z's. 
- private String field; + private String fieldName; private String geohash; - private int levels = -1; - private boolean neighbors; + private Integer levels = null; + private boolean neighbors = DEFAULT_NEIGHBORS; + private static final Builder PROTOTYPE = new Builder("field", new GeoPoint()); - public Builder(String field) { - this(field, null, false); - } - public Builder(String field, GeoPoint point) { - this(field, point.geohash(), false); + this(field, point == null ? null : point.geohash(), false); } public Builder(String field, String geohash) { @@ -113,8 +114,13 @@ public class GeohashCellQuery { } public Builder(String field, String geohash, boolean neighbors) { - super(); - this.field = field; + if (Strings.isEmpty(field)) { + throw new IllegalArgumentException("fieldName must not be null"); + } + if (Strings.isEmpty(geohash)) { + throw new IllegalArgumentException("geohash or point must be defined"); + } + this.fieldName = field; this.geohash = geohash; this.neighbors = neighbors; } @@ -134,11 +140,22 @@ public class GeohashCellQuery { return this; } + public String geohash() { + return geohash; + } + public Builder precision(int levels) { + if (levels <= 0) { + throw new IllegalArgumentException("precision must be greater than 0. 
Found [" + levels + "]"); + } this.levels = levels; return this; } + public Integer precision() { + return levels; + } + public Builder precision(String precision) { double meters = DistanceUnit.parse(precision, DistanceUnit.DEFAULT, DistanceUnit.METERS); return precision(GeoUtils.geoHashLevelsForPrecision(meters)); @@ -149,27 +166,107 @@ public class GeohashCellQuery { return this; } - public Builder field(String field) { - this.field = field; + public boolean neighbors() { + return neighbors; + } + + public Builder fieldName(String fieldName) { + this.fieldName = fieldName; return this; } + public String fieldName() { + return fieldName; + } + + @Override + protected Query doToQuery(QueryShardContext context) throws IOException { + MappedFieldType fieldType = context.fieldMapper(fieldName); + if (fieldType == null) { + throw new QueryShardException(context, "failed to parse [{}] query. missing [{}] field [{}]", NAME, + GeoPointFieldMapper.CONTENT_TYPE, fieldName); + } + + if (!(fieldType instanceof GeoPointFieldMapper.GeoPointFieldType)) { + throw new QueryShardException(context, "failed to parse [{}] query. field [{}] is not a geo_point field", NAME, fieldName); + } + + GeoPointFieldMapper.GeoPointFieldType geoFieldType = ((GeoPointFieldMapper.GeoPointFieldType) fieldType); + if (!geoFieldType.isGeohashPrefixEnabled()) { + throw new QueryShardException(context, "failed to parse [{}] query. 
[geohash_prefix] is not enabled for field [{}]", NAME, + fieldName); + } + + if (levels != null) { + int len = Math.min(levels, geohash.length()); + geohash = geohash.substring(0, len); + } + + Query query; + if (neighbors) { + query = create(context, geoFieldType, geohash, XGeoHashUtils.addNeighbors(geohash, new ArrayList(8))); + } else { + query = create(context, geoFieldType, geohash, null); + } + return query; + } + @Override protected void doXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(NAME); - if (neighbors) { - builder.field(NEIGHBORS, neighbors); + builder.field(NEIGHBORS_FIELD.getPreferredName(), neighbors); + if (levels != null) { + builder.field(PRECISION_FIELD.getPreferredName(), levels); } - if(levels > 0) { - builder.field(PRECISION, levels); - } - builder.field(field, geohash); - + builder.field(fieldName, geohash); + printBoostAndQueryName(builder); builder.endObject(); } + + @Override + protected Builder doReadFrom(StreamInput in) throws IOException { + String field = in.readString(); + String geohash = in.readString(); + Builder builder = new Builder(field, geohash); + if (in.readBoolean()) { + builder.precision(in.readVInt()); + } + builder.neighbors(in.readBoolean()); + return builder; + } + + @Override + protected void doWriteTo(StreamOutput out) throws IOException { + out.writeString(fieldName); + out.writeString(geohash); + boolean hasLevels = levels != null; + out.writeBoolean(hasLevels); + if (hasLevels) { + out.writeVInt(levels); + } + out.writeBoolean(neighbors); + } + + @Override + protected boolean doEquals(Builder other) { + return Objects.equals(fieldName, other.fieldName) + && Objects.equals(geohash, other.geohash) + && Objects.equals(levels, other.levels) + && Objects.equals(neighbors, other.neighbors); + } + + @Override + protected int doHashCode() { + return Objects.hash(fieldName, geohash, levels, neighbors); + } + + @Override + public String getWriteableName() { + return NAME; + } } - 
public static class Parser implements QueryParser { + public static class Parser implements QueryParser { @Inject public Parser() { @@ -181,14 +278,15 @@ public class GeohashCellQuery { } @Override - public Query parse(QueryParseContext parseContext) throws IOException, ParsingException { + public Builder fromXContent(QueryParseContext parseContext) throws IOException { XContentParser parser = parseContext.parser(); String fieldName = null; String geohash = null; - int levels = -1; - boolean neighbors = false; - + Integer levels = null; + Boolean neighbors = null; + String queryName = null; + Float boost = null; XContentParser.Token token; if ((token = parser.currentToken()) != Token.START_OBJECT) { @@ -201,24 +299,31 @@ public class GeohashCellQuery { if (parseContext.isDeprecatedSetting(field)) { // skip - } else if (PRECISION.equals(field)) { + } else if (parseContext.parseFieldMatcher().match(field, PRECISION_FIELD)) { token = parser.nextToken(); - if(token == Token.VALUE_NUMBER) { + if (token == Token.VALUE_NUMBER) { levels = parser.intValue(); - } else if(token == Token.VALUE_STRING) { + } else if (token == Token.VALUE_STRING) { double meters = DistanceUnit.parse(parser.text(), DistanceUnit.DEFAULT, DistanceUnit.METERS); levels = GeoUtils.geoHashLevelsForPrecision(meters); } - } else if (NEIGHBORS.equals(field)) { + } else if (parseContext.parseFieldMatcher().match(field, NEIGHBORS_FIELD)) { parser.nextToken(); neighbors = parser.booleanValue(); + } else if (parseContext.parseFieldMatcher().match(field, AbstractQueryBuilder.NAME_FIELD)) { + parser.nextToken(); + queryName = parser.text(); + } else if (parseContext.parseFieldMatcher().match(field, AbstractQueryBuilder.BOOST_FIELD)) { + parser.nextToken(); + boost = parser.floatValue(); } else { fieldName = field; token = parser.nextToken(); - if(token == Token.VALUE_STRING) { - // A string indicates either a gehash or a lat/lon string + if (token == Token.VALUE_STRING) { + // A string indicates either a 
geohash or a lat/lon + // string String location = parser.text(); - if(location.indexOf(",")>0) { + if (location.indexOf(",") > 0) { geohash = GeoUtils.parseGeoPoint(parser).geohash(); } else { geohash = location; @@ -231,38 +336,25 @@ public class GeohashCellQuery { throw new ElasticsearchParseException("failed to parse [{}] query. unexpected token [{}]", NAME, token); } } - - if (geohash == null) { - throw new ParsingException(parseContext, "failed to parse [{}] query. missing geohash value", NAME); + Builder builder = new Builder(fieldName, geohash); + if (levels != null) { + builder.precision(levels); } - - MappedFieldType fieldType = parseContext.fieldMapper(fieldName); - if (fieldType == null) { - throw new ParsingException(parseContext, "failed to parse [{}] query. missing [{}] field [{}]", NAME, GeoPointFieldMapper.CONTENT_TYPE, fieldName); + if (neighbors != null) { + builder.neighbors(neighbors); } - - if (!(fieldType instanceof GeoPointFieldMapper.GeoPointFieldType)) { - throw new ParsingException(parseContext, "failed to parse [{}] query. field [{}] is not a geo_point field", NAME, fieldName); + if (queryName != null) { + builder.queryName(queryName); } - - GeoPointFieldMapper.GeoPointFieldType geoFieldType = ((GeoPointFieldMapper.GeoPointFieldType) fieldType); - if (!geoFieldType.isGeohashPrefixEnabled()) { - throw new ParsingException(parseContext, "failed to parse [{}] query. 
[geohash_prefix] is not enabled for field [{}]", NAME, fieldName); + if (boost != null) { + builder.boost(boost); } + return builder; + } - if(levels > 0) { - int len = Math.min(levels, geohash.length()); - geohash = geohash.substring(0, len); - } - - Query filter; - if (neighbors) { - filter = create(parseContext, geoFieldType, geohash, XGeoHashUtils.addNeighbors(geohash, new ArrayList<>(8))); - } else { - filter = create(parseContext, geoFieldType, geohash, null); - } - - return filter; + @Override + public GeohashCellQuery.Builder getBuilderPrototype() { + return Builder.PROTOTYPE; } } } diff --git a/core/src/main/java/org/elasticsearch/index/query/HasChildQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/HasChildQueryBuilder.java index 58af4c4dd47..3439d8858a6 100644 --- a/core/src/main/java/org/elasticsearch/index/query/HasChildQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/HasChildQueryBuilder.java @@ -18,48 +18,92 @@ */ package org.elasticsearch.index.query; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.MultiDocValues; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.join.JoinUtil; +import org.apache.lucene.search.join.ScoreMode; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.index.query.support.QueryInnerHitBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.fielddata.IndexParentChildFieldData; +import org.elasticsearch.index.fielddata.plain.ParentChildIndexFieldData; +import org.elasticsearch.index.mapper.DocumentMapper; +import org.elasticsearch.index.mapper.internal.ParentFieldMapper; +import org.elasticsearch.index.query.support.QueryInnerHits; +import 
org.elasticsearch.search.fetch.innerhits.InnerHitsContext; +import org.elasticsearch.search.fetch.innerhits.InnerHitsSubSearchContext; import java.io.IOException; +import java.util.Locale; +import java.util.Objects; -public class HasChildQueryBuilder extends QueryBuilder implements BoostableQueryBuilder { - - private final QueryBuilder queryBuilder; - - private String childType; - - private float boost = 1.0f; - - private String scoreMode; - - private Integer minChildren; - - private Integer maxChildren; - - private String queryName; - - private QueryInnerHitBuilder innerHit = null; - - public HasChildQueryBuilder(String type, QueryBuilder queryBuilder) { - this.childType = type; - this.queryBuilder = queryBuilder; - } +/** + * A query builder for has_child queries. + */ +public class HasChildQueryBuilder extends AbstractQueryBuilder { /** - * Sets the boost for this query. Documents matching this query will (in addition to the normal - * weightings) have their score multiplied by the boost provided. + * The queries name */ - @Override - public HasChildQueryBuilder boost(float boost) { - this.boost = boost; - return this; + public static final String NAME = "has_child"; + + /** + * The default maximum number of children that are required to match for the parent to be considered a match. + */ + public static final int DEFAULT_MAX_CHILDREN = Integer.MAX_VALUE; + /** + * The default minimum number of children that are required to match for the parent to be considered a match. + */ + public static final int DEFAULT_MIN_CHILDREN = 0; + /* + * The default score mode that is used to combine score coming from multiple parent documents. 
+ */ + public static final ScoreMode DEFAULT_SCORE_MODE = ScoreMode.None; + + private final QueryBuilder query; + + private final String type; + + private ScoreMode scoreMode = DEFAULT_SCORE_MODE; + + private int minChildren = DEFAULT_MIN_CHILDREN; + + private int maxChildren = DEFAULT_MAX_CHILDREN; + + private QueryInnerHits queryInnerHits; + + static final HasChildQueryBuilder PROTOTYPE = new HasChildQueryBuilder("", EmptyQueryBuilder.PROTOTYPE); + + public HasChildQueryBuilder(String type, QueryBuilder query, int maxChildren, int minChildren, ScoreMode scoreMode, QueryInnerHits queryInnerHits) { + this(type, query); + scoreMode(scoreMode); + this.maxChildren = maxChildren; + this.minChildren = minChildren; + this.queryInnerHits = queryInnerHits; + } + + public HasChildQueryBuilder(String type, QueryBuilder query) { + if (type == null) { + throw new IllegalArgumentException("[" + NAME + "] requires 'type' field"); + } + if (query == null) { + throw new IllegalArgumentException("[" + NAME + "] requires 'query' field"); + } + this.type = type; + this.query = query; } /** * Defines how the scores from the matching child documents are mapped into the parent document. */ - public HasChildQueryBuilder scoreMode(String scoreMode) { + public HasChildQueryBuilder scoreMode(ScoreMode scoreMode) { + if (scoreMode == null) { + throw new IllegalArgumentException("[" + NAME + "] requires 'score_mode' field"); + } this.scoreMode = scoreMode; return this; } @@ -68,6 +112,9 @@ public class HasChildQueryBuilder extends QueryBuilder implements BoostableQuery * Defines the minimum number of children that are required to match for the parent to be considered a match. 
*/ public HasChildQueryBuilder minChildren(int minChildren) { + if (minChildren < 0) { + throw new IllegalArgumentException("[" + NAME + "] requires non-negative 'min_children' field"); + } this.minChildren = minChildren; return this; } @@ -76,6 +123,9 @@ public class HasChildQueryBuilder extends QueryBuilder implements BoostableQuery * Defines the maximum number of children that are required to match for the parent to be considered a match. */ public HasChildQueryBuilder maxChildren(int maxChildren) { + if (maxChildren < 0) { + throw new IllegalArgumentException("[" + NAME + "] requires non-negative 'max_children' field"); + } this.maxChildren = maxChildren; return this; } @@ -83,45 +133,252 @@ public class HasChildQueryBuilder extends QueryBuilder implements BoostableQuery /** * Sets the query name for the filter that can be used when searching for matched_filters per hit. */ - public HasChildQueryBuilder queryName(String queryName) { - this.queryName = queryName; + public HasChildQueryBuilder innerHit(QueryInnerHits queryInnerHits) { + this.queryInnerHits = queryInnerHits; return this; } /** - * Sets inner hit definition in the scope of this query and reusing the defined type and query. + * Returns inner hit definition in the scope of this query and reusing the defined type and query. */ - public HasChildQueryBuilder innerHit(QueryInnerHitBuilder innerHit) { - this.innerHit = innerHit; - return this; + public QueryInnerHits innerHit() { + return queryInnerHits; } + /** + * Returns the children query to execute. + */ + public QueryBuilder query() { + return query; + } + + /** + * Returns the child type + */ + public String childType() { + return type; + } + + /** + * Returns how the scores from the matching child documents are mapped into the parent document. + */ + public ScoreMode scoreMode() { + return scoreMode; + } + + /** + * Returns the minimum number of children that are required to match for the parent to be considered a match. 
+ * The default is {@value #DEFAULT_MIN_CHILDREN} + */ + public int minChildren() { + return minChildren; + } + + /** + * Returns the maximum number of children that are required to match for the parent to be considered a match. + * The default is {@value #DEFAULT_MAX_CHILDREN} + */ + public int maxChildren() { return maxChildren; } + @Override protected void doXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(HasChildQueryParser.NAME); + builder.startObject(NAME); builder.field("query"); - queryBuilder.toXContent(builder, params); - builder.field("child_type", childType); - if (boost != 1.0f) { - builder.field("boost", boost); - } - if (scoreMode != null) { - builder.field("score_mode", scoreMode); - } - if (minChildren != null) { - builder.field("min_children", minChildren); - } - if (maxChildren != null) { - builder.field("max_children", maxChildren); - } - if (queryName != null) { - builder.field("_name", queryName); - } - if (innerHit != null) { - builder.startObject("inner_hits"); - builder.value(innerHit); - builder.endObject(); + query.toXContent(builder, params); + builder.field("child_type", type); + builder.field("score_mode", scoreMode.name().toLowerCase(Locale.ROOT)); + builder.field("min_children", minChildren); + builder.field("max_children", maxChildren); + printBoostAndQueryName(builder); + if (queryInnerHits != null) { + queryInnerHits.toXContent(builder, params); } builder.endObject(); } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + protected Query doToQuery(QueryShardContext context) throws IOException { + Query innerQuery = query.toQuery(context); + if (innerQuery == null) { + return null; + } + innerQuery.setBoost(boost); + + DocumentMapper childDocMapper = context.mapperService().documentMapper(type); + if (childDocMapper == null) { + throw new QueryShardException(context, "[" + NAME + "] no mapping found for type [" + type + "]"); + } + ParentFieldMapper 
parentFieldMapper = childDocMapper.parentFieldMapper(); + if (parentFieldMapper.active() == false) { + throw new QueryShardException(context, "[" + NAME + "] _parent field has no parent type configured"); + } + if (queryInnerHits != null) { + try (XContentParser parser = queryInnerHits.getXcontentParser()) { + XContentParser.Token token = parser.nextToken(); + if (token != XContentParser.Token.START_OBJECT) { + throw new IllegalStateException("start object expected but was: [" + token + "]"); + } + InnerHitsSubSearchContext innerHits = context.indexQueryParserService().getInnerHitsQueryParserHelper().parse(parser); + if (innerHits != null) { + ParsedQuery parsedQuery = new ParsedQuery(innerQuery, context.copyNamedQueries()); + InnerHitsContext.ParentChildInnerHits parentChildInnerHits = new InnerHitsContext.ParentChildInnerHits(innerHits.getSubSearchContext(), parsedQuery, null, context.mapperService(), childDocMapper); + String name = innerHits.getName() != null ? innerHits.getName() : type; + context.addInnerHits(name, parentChildInnerHits); + } + } + } + + String parentType = parentFieldMapper.type(); + DocumentMapper parentDocMapper = context.mapperService().documentMapper(parentType); + if (parentDocMapper == null) { + throw new QueryShardException(context, "[" + NAME + "] Type [" + type + "] points to a non existent parent type [" + + parentType + "]"); + } + + if (maxChildren > 0 && maxChildren < minChildren) { + throw new QueryShardException(context, "[" + NAME + "] 'max_children' is less than 'min_children'"); + } + + // wrap the query with type query + innerQuery = Queries.filtered(innerQuery, childDocMapper.typeFilter()); + + final ParentChildIndexFieldData parentChildIndexFieldData = context.getForField(parentFieldMapper.fieldType()); + int maxChildren = maxChildren(); + // 0 in pre 2.x p/c impl means unbounded + if (maxChildren == 0) { + maxChildren = Integer.MAX_VALUE; + } + return new LateParsingQuery(parentDocMapper.typeFilter(), innerQuery, 
minChildren(), maxChildren, parentType, scoreMode, parentChildIndexFieldData); + } + + final static class LateParsingQuery extends Query { + + private final Query toQuery; + private final Query innerQuery; + private final int minChildren; + private final int maxChildren; + private final String parentType; + private final ScoreMode scoreMode; + private final ParentChildIndexFieldData parentChildIndexFieldData; + + LateParsingQuery(Query toQuery, Query innerQuery, int minChildren, int maxChildren, String parentType, ScoreMode scoreMode, ParentChildIndexFieldData parentChildIndexFieldData) { + this.toQuery = toQuery; + this.innerQuery = innerQuery; + this.minChildren = minChildren; + this.maxChildren = maxChildren; + this.parentType = parentType; + this.scoreMode = scoreMode; + this.parentChildIndexFieldData = parentChildIndexFieldData; + } + + @Override + public Query rewrite(IndexReader reader) throws IOException { + if (getBoost() != 1.0F) { + return super.rewrite(reader); + } + String joinField = ParentFieldMapper.joinField(parentType); + IndexSearcher indexSearcher = new IndexSearcher(reader); + indexSearcher.setQueryCache(null); + IndexParentChildFieldData indexParentChildFieldData = parentChildIndexFieldData.loadGlobal(indexSearcher.getIndexReader()); + MultiDocValues.OrdinalMap ordinalMap = ParentChildIndexFieldData.getOrdinalMap(indexParentChildFieldData, parentType); + return JoinUtil.createJoinQuery(joinField, innerQuery, toQuery, indexSearcher, scoreMode, ordinalMap, minChildren, maxChildren); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + if (!super.equals(o)) return false; + + LateParsingQuery that = (LateParsingQuery) o; + + if (minChildren != that.minChildren) return false; + if (maxChildren != that.maxChildren) return false; + if (!toQuery.equals(that.toQuery)) return false; + if (!innerQuery.equals(that.innerQuery)) return false; + if 
(!parentType.equals(that.parentType)) return false; + return scoreMode == that.scoreMode; + } + + @Override + public int hashCode() { + int result = super.hashCode(); + result = 31 * result + toQuery.hashCode(); + result = 31 * result + innerQuery.hashCode(); + result = 31 * result + minChildren; + result = 31 * result + maxChildren; + result = 31 * result + parentType.hashCode(); + result = 31 * result + scoreMode.hashCode(); + return result; + } + + @Override + public String toString(String s) { + return "LateParsingQuery {parentType=" + parentType + "}"; + } + + public int getMinChildren() { + return minChildren; + } + + public int getMaxChildren() { + return maxChildren; + } + + public ScoreMode getScoreMode() { + return scoreMode; + } + } + + @Override + protected boolean doEquals(HasChildQueryBuilder that) { + return Objects.equals(query, that.query) + && Objects.equals(type, that.type) + && Objects.equals(scoreMode, that.scoreMode) + && Objects.equals(minChildren, that.minChildren) + && Objects.equals(maxChildren, that.maxChildren) + && Objects.equals(queryInnerHits, that.queryInnerHits); + } + + @Override + protected int doHashCode() { + return Objects.hash(query, type, scoreMode, minChildren, maxChildren, queryInnerHits); + } + + protected HasChildQueryBuilder(StreamInput in) throws IOException { + type = in.readString(); + minChildren = in.readInt(); + maxChildren = in.readInt(); + final int ordinal = in.readVInt(); + scoreMode = ScoreMode.values()[ordinal]; + query = in.readQuery(); + if (in.readBoolean()) { + queryInnerHits = new QueryInnerHits(in); + } + } + + @Override + protected HasChildQueryBuilder doReadFrom(StreamInput in) throws IOException { + return new HasChildQueryBuilder(in); + } + + @Override + protected void doWriteTo(StreamOutput out) throws IOException { + out.writeString(type); + out.writeInt(minChildren()); + out.writeInt(maxChildren()); + out.writeVInt(scoreMode.ordinal()); + out.writeQuery(query); + if (queryInnerHits != null) { + 
out.writeBoolean(true); + queryInnerHits.writeTo(out); + } else { + out.writeBoolean(false); + } + } } diff --git a/core/src/main/java/org/elasticsearch/index/query/HasChildQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/HasChildQueryParser.java index 376764a3fe6..ede48da4550 100644 --- a/core/src/main/java/org/elasticsearch/index/query/HasChildQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/HasChildQueryParser.java @@ -19,82 +19,52 @@ package org.elasticsearch.index.query; -import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.MultiDocValues; -import org.apache.lucene.search.IndexSearcher; -import org.apache.lucene.search.Query; -import org.apache.lucene.search.join.JoinUtil; import org.apache.lucene.search.join.ScoreMode; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.fielddata.IndexParentChildFieldData; -import org.elasticsearch.index.fielddata.plain.ParentChildIndexFieldData; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.internal.ParentFieldMapper; -import org.elasticsearch.index.query.support.InnerHitsQueryParserHelper; -import org.elasticsearch.index.query.support.XContentStructure; -import org.elasticsearch.search.fetch.innerhits.InnerHitsContext; -import org.elasticsearch.search.fetch.innerhits.InnerHitsSubSearchContext; +import org.elasticsearch.index.query.support.QueryInnerHits; import java.io.IOException; /** - * + * A query parser for has_child queries. 
*/ -public class HasChildQueryParser implements QueryParser { +public class HasChildQueryParser implements QueryParser { - public static final String NAME = "has_child"; private static final ParseField QUERY_FIELD = new ParseField("query", "filter"); - private final InnerHitsQueryParserHelper innerHitsQueryParserHelper; - - @Inject - public HasChildQueryParser(InnerHitsQueryParserHelper innerHitsQueryParserHelper) { - this.innerHitsQueryParserHelper = innerHitsQueryParserHelper; - } - @Override public String[] names() { - return new String[] { NAME, Strings.toCamelCase(NAME) }; + return new String[] { HasChildQueryBuilder.NAME, Strings.toCamelCase(HasChildQueryBuilder.NAME) }; } @Override - public Query parse(QueryParseContext parseContext) throws IOException, ParsingException { + public HasChildQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException { XContentParser parser = parseContext.parser(); - - boolean queryFound = false; - float boost = 1.0f; + float boost = AbstractQueryBuilder.DEFAULT_BOOST; String childType = null; - ScoreMode scoreMode = ScoreMode.None; - int minChildren = 0; - int maxChildren = Integer.MAX_VALUE; + ScoreMode scoreMode = HasChildQueryBuilder.DEFAULT_SCORE_MODE; + int minChildren = HasChildQueryBuilder.DEFAULT_MIN_CHILDREN; + int maxChildren = HasChildQueryBuilder.DEFAULT_MAX_CHILDREN; String queryName = null; - InnerHitsSubSearchContext innerHits = null; - + QueryInnerHits queryInnerHits = null; String currentFieldName = null; XContentParser.Token token; - XContentStructure.InnerQuery iq = null; + QueryBuilder iqb = null; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); } else if (parseContext.isDeprecatedSetting(currentFieldName)) { // skip } else if (token == XContentParser.Token.START_OBJECT) { - // Usually, the query would be parsed here, but the child - // type may not have been extracted yet, so 
use the - // XContentStructure. facade to parse if available, - // or delay parsing if not. if (parseContext.parseFieldMatcher().match(currentFieldName, QUERY_FIELD)) { - iq = new XContentStructure.InnerQuery(parseContext, childType == null ? null : new String[] { childType }); - queryFound = true; + iqb = parseContext.parseInnerQueryBuilder(); } else if ("inner_hits".equals(currentFieldName)) { - innerHits = innerHitsQueryParserHelper.parse(parseContext); + queryInnerHits = new QueryInnerHits(parser); } else { - throw new ParsingException(parseContext, "[has_child] query does not support [" + currentFieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), "[has_child] query does not support [" + currentFieldName + "]"); } } else if (token.isValue()) { if ("type".equals(currentFieldName) || "child_type".equals(currentFieldName) || "childType".equals(currentFieldName)) { @@ -110,66 +80,14 @@ public class HasChildQueryParser implements QueryParser { } else if ("_name".equals(currentFieldName)) { queryName = parser.text(); } else { - throw new ParsingException(parseContext, "[has_child] query does not support [" + currentFieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), "[has_child] query does not support [" + currentFieldName + "]"); } } } - if (!queryFound) { - throw new ParsingException(parseContext, "[has_child] requires 'query' field"); - } - if (childType == null) { - throw new ParsingException(parseContext, "[has_child] requires 'type' field"); - } - - Query innerQuery = iq.asQuery(childType); - - if (innerQuery == null) { - return null; - } - innerQuery.setBoost(boost); - - DocumentMapper childDocMapper = parseContext.mapperService().documentMapper(childType); - if (childDocMapper == null) { - throw new ParsingException(parseContext, "[has_child] No mapping for for type [" + childType + "]"); - } - ParentFieldMapper parentFieldMapper = childDocMapper.parentFieldMapper(); - if (parentFieldMapper.active() == false) { - 
throw new ParsingException(parseContext, "[has_child] _parent field has no parent type configured"); - } - - if (innerHits != null) { - ParsedQuery parsedQuery = new ParsedQuery(innerQuery, parseContext.copyNamedQueries()); - InnerHitsContext.ParentChildInnerHits parentChildInnerHits = new InnerHitsContext.ParentChildInnerHits(innerHits.getSubSearchContext(), parsedQuery, null, parseContext.mapperService(), childDocMapper); - String name = innerHits.getName() != null ? innerHits.getName() : childType; - parseContext.addInnerHits(name, parentChildInnerHits); - } - - String parentType = parentFieldMapper.type(); - DocumentMapper parentDocMapper = parseContext.mapperService().documentMapper(parentType); - if (parentDocMapper == null) { - throw new ParsingException(parseContext, "[has_child] Type [" + childType + "] points to a non existent parent type [" - + parentType + "]"); - } - - if (maxChildren > 0 && maxChildren < minChildren) { - throw new ParsingException(parseContext, "[has_child] 'max_children' is less than 'min_children'"); - } - - // wrap the query with type query - innerQuery = Queries.filtered(innerQuery, childDocMapper.typeFilter()); - - final Query query; - final ParentChildIndexFieldData parentChildIndexFieldData = parseContext.getForField(parentFieldMapper.fieldType()); - query = joinUtilHelper(parentType, parentChildIndexFieldData, parentDocMapper.typeFilter(), scoreMode, innerQuery, minChildren, maxChildren); - if (queryName != null) { - parseContext.addNamedQuery(queryName, query); - } - query.setBoost(boost); - return query; - } - - public static Query joinUtilHelper(String parentType, ParentChildIndexFieldData parentChildIndexFieldData, Query toQuery, ScoreMode scoreMode, Query innerQuery, int minChildren, int maxChildren) throws IOException { - return new LateParsingQuery(toQuery, innerQuery, minChildren, maxChildren, parentType, scoreMode, parentChildIndexFieldData); + HasChildQueryBuilder hasChildQueryBuilder = new 
HasChildQueryBuilder(childType, iqb, maxChildren, minChildren, scoreMode, queryInnerHits); + hasChildQueryBuilder.queryName(queryName); + hasChildQueryBuilder.boost(boost); + return hasChildQueryBuilder; } public static ScoreMode parseScoreMode(String scoreModeString) { @@ -187,64 +105,8 @@ public class HasChildQueryParser implements QueryParser { throw new IllegalArgumentException("No score mode for child query [" + scoreModeString + "] found"); } - final static class LateParsingQuery extends Query { - - private final Query toQuery; - private final Query innerQuery; - private final int minChildren; - private final int maxChildren; - private final String parentType; - private final ScoreMode scoreMode; - private final ParentChildIndexFieldData parentChildIndexFieldData; - private final Object identity = new Object(); - - LateParsingQuery(Query toQuery, Query innerQuery, int minChildren, int maxChildren, String parentType, ScoreMode scoreMode, ParentChildIndexFieldData parentChildIndexFieldData) { - this.toQuery = toQuery; - this.innerQuery = innerQuery; - this.minChildren = minChildren; - this.maxChildren = maxChildren; - this.parentType = parentType; - this.scoreMode = scoreMode; - this.parentChildIndexFieldData = parentChildIndexFieldData; - } - - @Override - public Query rewrite(IndexReader reader) throws IOException { - if (getBoost() != 1.0F) { - return super.rewrite(reader); - } - String joinField = ParentFieldMapper.joinField(parentType); - IndexSearcher indexSearcher = new IndexSearcher(reader); - indexSearcher.setQueryCache(null); - IndexParentChildFieldData indexParentChildFieldData = parentChildIndexFieldData.loadGlobal(indexSearcher.getIndexReader()); - MultiDocValues.OrdinalMap ordinalMap = ParentChildIndexFieldData.getOrdinalMap(indexParentChildFieldData, parentType); - return JoinUtil.createJoinQuery(joinField, innerQuery, toQuery, indexSearcher, scoreMode, ordinalMap, minChildren, maxChildren); - } - - // Even though we only cache rewritten queries 
it is good to let all queries implement hashCode() and equals(): - - // We can't check for actually equality here, since we need to IndexReader for this, but - // that isn't available on all cases during query parse time, so instead rely on identity: - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - if (!super.equals(o)) return false; - - LateParsingQuery that = (LateParsingQuery) o; - return identity.equals(that.identity); - } - - @Override - public int hashCode() { - int result = super.hashCode(); - result = 31 * result + identity.hashCode(); - return result; - } - - @Override - public String toString(String s) { - return "LateParsingQuery {parentType=" + parentType + "}"; - } + @Override + public HasChildQueryBuilder getBuilderPrototype() { + return HasChildQueryBuilder.PROTOTYPE; } } diff --git a/core/src/main/java/org/elasticsearch/index/query/HasParentQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/HasParentQueryBuilder.java index 15868fecfbc..23be36fc8f7 100644 --- a/core/src/main/java/org/elasticsearch/index/query/HasParentQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/HasParentQueryBuilder.java @@ -18,83 +18,234 @@ */ package org.elasticsearch.index.query; +import org.apache.lucene.search.BooleanClause; +import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.join.ScoreMode; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.index.query.support.QueryInnerHitBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.fielddata.plain.ParentChildIndexFieldData; +import org.elasticsearch.index.mapper.DocumentMapper; +import 
org.elasticsearch.index.mapper.internal.ParentFieldMapper; +import org.elasticsearch.index.query.support.QueryInnerHits; +import org.elasticsearch.search.fetch.innerhits.InnerHitsContext; +import org.elasticsearch.search.fetch.innerhits.InnerHitsSubSearchContext; import java.io.IOException; +import java.util.HashSet; +import java.util.Objects; +import java.util.Set; /** * Builder for the 'has_parent' query. */ -public class HasParentQueryBuilder extends QueryBuilder implements BoostableQueryBuilder { +public class HasParentQueryBuilder extends AbstractQueryBuilder { - private final QueryBuilder queryBuilder; - private final String parentType; - private String scoreMode; - private float boost = 1.0f; - private String queryName; - private QueryInnerHitBuilder innerHit = null; + public static final String NAME = "has_parent"; + public static final boolean DEFAULT_SCORE = false; + private final QueryBuilder query; + private final String type; + private boolean score = DEFAULT_SCORE; + private QueryInnerHits innerHit; /** - * @param parentType The parent type - * @param parentQuery The query that will be matched with parent documents + * @param type The parent type + * @param query The query that will be matched with parent documents */ - public HasParentQueryBuilder(String parentType, QueryBuilder parentQuery) { - this.parentType = parentType; - this.queryBuilder = parentQuery; + public HasParentQueryBuilder(String type, QueryBuilder query) { + if (type == null) { + throw new IllegalArgumentException("[" + NAME + "] requires 'parent_type' field"); + } + if (query == null) { + throw new IllegalArgumentException("[" + NAME + "] requires 'query' field"); + } + this.type = type; + this.query = query; } - @Override - public HasParentQueryBuilder boost(float boost) { - this.boost = boost; - return this; + public HasParentQueryBuilder(String type, QueryBuilder query, boolean score, QueryInnerHits innerHits) { + this(type, query); + this.score = score; + this.innerHit = 
innerHits; } /** - * Defines how the parent score is mapped into the child documents. + * Defines if the parent score is mapped into the child documents. */ - public HasParentQueryBuilder scoreMode(String scoreMode) { - this.scoreMode = scoreMode; - return this; - } - - /** - * Sets the query name for the filter that can be used when searching for matched_filters per hit. - */ - public HasParentQueryBuilder queryName(String queryName) { - this.queryName = queryName; + public HasParentQueryBuilder score(boolean score) { + this.score = score; return this; } /** * Sets inner hit definition in the scope of this query and reusing the defined type and query. */ - public HasParentQueryBuilder innerHit(QueryInnerHitBuilder innerHit) { + public HasParentQueryBuilder innerHit(QueryInnerHits innerHit) { this.innerHit = innerHit; return this; } + /** + * Returns the query to execute. + */ + public QueryBuilder query() { + return query; + } + + /** + * Returns true if the parent score is mapped into the child documents + */ + public boolean score() { + return score; + } + + /** + * Returns the parents type name + */ + public String type() { + return type; + } + + /** + * Returns inner hit definition in the scope of this query and reusing the defined type and query. 
+ */ + public QueryInnerHits innerHit() { + return innerHit; + } + + @Override + protected Query doToQuery(QueryShardContext context) throws IOException { + Query innerQuery = query.toQuery(context); + if (innerQuery == null) { + return null; + } + innerQuery.setBoost(boost); + DocumentMapper parentDocMapper = context.mapperService().documentMapper(type); + if (parentDocMapper == null) { + throw new QueryShardException(context, "[has_parent] query configured 'parent_type' [" + type + + "] is not a valid type"); + } + + if (innerHit != null) { + try (XContentParser parser = innerHit.getXcontentParser()) { + XContentParser.Token token = parser.nextToken(); + if (token != XContentParser.Token.START_OBJECT) { + throw new IllegalStateException("start object expected but was: [" + token + "]"); + } + InnerHitsSubSearchContext innerHits = context.indexQueryParserService().getInnerHitsQueryParserHelper().parse(parser); + if (innerHits != null) { + ParsedQuery parsedQuery = new ParsedQuery(innerQuery, context.copyNamedQueries()); + InnerHitsContext.ParentChildInnerHits parentChildInnerHits = new InnerHitsContext.ParentChildInnerHits(innerHits.getSubSearchContext(), parsedQuery, null, context.mapperService(), parentDocMapper); + String name = innerHits.getName() != null ? 
innerHits.getName() : type; + context.addInnerHits(name, parentChildInnerHits); + } + } + } + + Set parentTypes = new HashSet<>(5); + parentTypes.add(parentDocMapper.type()); + ParentChildIndexFieldData parentChildIndexFieldData = null; + for (DocumentMapper documentMapper : context.mapperService().docMappers(false)) { + ParentFieldMapper parentFieldMapper = documentMapper.parentFieldMapper(); + if (parentFieldMapper.active()) { + DocumentMapper parentTypeDocumentMapper = context.mapperService().documentMapper(parentFieldMapper.type()); + parentChildIndexFieldData = context.getForField(parentFieldMapper.fieldType()); + if (parentTypeDocumentMapper == null) { + // Only add this, if this parentFieldMapper (also a parent) isn't a child of another parent. + parentTypes.add(parentFieldMapper.type()); + } + } + } + if (parentChildIndexFieldData == null) { + throw new QueryShardException(context, "[has_parent] no _parent field configured"); + } + + Query parentTypeQuery = null; + if (parentTypes.size() == 1) { + DocumentMapper documentMapper = context.mapperService().documentMapper(parentTypes.iterator().next()); + if (documentMapper != null) { + parentTypeQuery = documentMapper.typeFilter(); + } + } else { + BooleanQuery.Builder parentsFilter = new BooleanQuery.Builder(); + for (String parentTypeStr : parentTypes) { + DocumentMapper documentMapper = context.mapperService().documentMapper(parentTypeStr); + if (documentMapper != null) { + parentsFilter.add(documentMapper.typeFilter(), BooleanClause.Occur.SHOULD); + } + } + parentTypeQuery = parentsFilter.build(); + } + + if (parentTypeQuery == null) { + return null; + } + + // wrap the query with type query + innerQuery = Queries.filtered(innerQuery, parentDocMapper.typeFilter()); + Query childrenFilter = Queries.not(parentTypeQuery); + return new HasChildQueryBuilder.LateParsingQuery(childrenFilter, innerQuery, HasChildQueryBuilder.DEFAULT_MIN_CHILDREN, HasChildQueryBuilder.DEFAULT_MAX_CHILDREN, type, score ? 
ScoreMode.Max : ScoreMode.None, parentChildIndexFieldData); + } + @Override protected void doXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(HasParentQueryParser.NAME); + builder.startObject(NAME); builder.field("query"); - queryBuilder.toXContent(builder, params); - builder.field("parent_type", parentType); - if (scoreMode != null) { - builder.field("score_mode", scoreMode); - } - if (boost != 1.0f) { - builder.field("boost", boost); - } - if (queryName != null) { - builder.field("_name", queryName); - } + query.toXContent(builder, params); + builder.field("parent_type", type); + builder.field("score", score); + printBoostAndQueryName(builder); if (innerHit != null) { - builder.startObject("inner_hits"); - builder.value(innerHit); - builder.endObject(); + innerHit.toXContent(builder, params); } builder.endObject(); } -} + @Override + public String getWriteableName() { + return NAME; + } + + protected HasParentQueryBuilder(StreamInput in) throws IOException { + type = in.readString(); + score = in.readBoolean(); + query = in.readQuery(); + if (in.readBoolean()) { + innerHit = new QueryInnerHits(in); + } + } + + @Override + protected HasParentQueryBuilder doReadFrom(StreamInput in) throws IOException { + return new HasParentQueryBuilder(in); + } + + @Override + protected void doWriteTo(StreamOutput out) throws IOException { + out.writeString(type); + out.writeBoolean(score); + out.writeQuery(query); + if (innerHit != null) { + out.writeBoolean(true); + innerHit.writeTo(out); + } else { + out.writeBoolean(false); + } + } + + @Override + protected boolean doEquals(HasParentQueryBuilder that) { + return Objects.equals(query, that.query) + && Objects.equals(type, that.type) + && Objects.equals(score, that.score) + && Objects.equals(innerHit, that.innerHit); + } + + @Override + protected int doHashCode() { + return Objects.hash(query, type, score, innerHit); + } +} diff --git 
a/core/src/main/java/org/elasticsearch/index/query/HasParentQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/HasParentQueryParser.java index 432dd071140..39306ed20f5 100644 --- a/core/src/main/java/org/elasticsearch/index/query/HasParentQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/HasParentQueryParser.java @@ -18,178 +18,79 @@ */ package org.elasticsearch.index.query; -import org.apache.lucene.search.*; -import org.apache.lucene.search.join.ScoreMode; + import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.fielddata.plain.ParentChildIndexFieldData; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.internal.ParentFieldMapper; -import org.elasticsearch.index.query.support.InnerHitsQueryParserHelper; -import org.elasticsearch.index.query.support.XContentStructure; -import org.elasticsearch.search.fetch.innerhits.InnerHitsContext; -import org.elasticsearch.search.fetch.innerhits.InnerHitsSubSearchContext; +import org.elasticsearch.index.query.support.QueryInnerHits; import java.io.IOException; -import java.util.HashSet; -import java.util.Set; -import static org.elasticsearch.index.query.HasChildQueryParser.joinUtilHelper; +public class HasParentQueryParser implements QueryParser { -public class HasParentQueryParser implements QueryParser { - - public static final String NAME = "has_parent"; + private static final HasParentQueryBuilder PROTOTYPE = new HasParentQueryBuilder("", EmptyQueryBuilder.PROTOTYPE); private static final ParseField QUERY_FIELD = new ParseField("query", "filter"); - - private final InnerHitsQueryParserHelper innerHitsQueryParserHelper; - - @Inject - public 
HasParentQueryParser(InnerHitsQueryParserHelper innerHitsQueryParserHelper) { - this.innerHitsQueryParserHelper = innerHitsQueryParserHelper; - } + private static final ParseField SCORE_FIELD = new ParseField("score_mode").withAllDeprecated("score"); + private static final ParseField TYPE_FIELD = new ParseField("parent_type", "type"); @Override public String[] names() { - return new String[]{NAME, Strings.toCamelCase(NAME)}; + return new String[]{HasParentQueryBuilder.NAME, Strings.toCamelCase(HasParentQueryBuilder.NAME)}; } @Override - public Query parse(QueryParseContext parseContext) throws IOException, ParsingException { + public HasParentQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException { XContentParser parser = parseContext.parser(); - boolean queryFound = false; - float boost = 1.0f; + float boost = AbstractQueryBuilder.DEFAULT_BOOST; String parentType = null; - boolean score = false; + boolean score = HasParentQueryBuilder.DEFAULT_SCORE; String queryName = null; - InnerHitsSubSearchContext innerHits = null; + QueryInnerHits innerHits = null; String currentFieldName = null; XContentParser.Token token; - XContentStructure.InnerQuery iq = null; + QueryBuilder iqb = null; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); } else if (token == XContentParser.Token.START_OBJECT) { - // Usually, the query would be parsed here, but the child - // type may not have been extracted yet, so use the - // XContentStructure. facade to parse if available, - // or delay parsing if not. if (parseContext.parseFieldMatcher().match(currentFieldName, QUERY_FIELD)) { - iq = new XContentStructure.InnerQuery(parseContext, parentType == null ? 
null : new String[] {parentType}); - queryFound = true; + iqb = parseContext.parseInnerQueryBuilder(); } else if ("inner_hits".equals(currentFieldName)) { - innerHits = innerHitsQueryParserHelper.parse(parseContext); + innerHits = new QueryInnerHits(parser); } else { - throw new ParsingException(parseContext, "[has_parent] query does not support [" + currentFieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), "[has_parent] query does not support [" + currentFieldName + "]"); } } else if (token.isValue()) { - if ("type".equals(currentFieldName) || "parent_type".equals(currentFieldName) || "parentType".equals(currentFieldName)) { + if (parseContext.parseFieldMatcher().match(currentFieldName, TYPE_FIELD)) { parentType = parser.text(); - } else if ("score_mode".equals(currentFieldName) || "scoreMode".equals(currentFieldName)) { + } else if (parseContext.parseFieldMatcher().match(currentFieldName, SCORE_FIELD)) { String scoreModeValue = parser.text(); if ("score".equals(scoreModeValue)) { score = true; } else if ("none".equals(scoreModeValue)) { score = false; + } else { + throw new ParsingException(parser.getTokenLocation(), "[has_parent] query does not support [" + scoreModeValue + "] as an option for score_mode"); } + } else if ("score".equals(currentFieldName)) { + score = parser.booleanValue(); } else if ("boost".equals(currentFieldName)) { boost = parser.floatValue(); } else if ("_name".equals(currentFieldName)) { queryName = parser.text(); } else { - throw new ParsingException(parseContext, "[has_parent] query does not support [" + currentFieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), "[has_parent] query does not support [" + currentFieldName + "]"); } } } - if (!queryFound) { - throw new ParsingException(parseContext, "[has_parent] query requires 'query' field"); - } - if (parentType == null) { - throw new ParsingException(parseContext, "[has_parent] query requires 'parent_type' field"); - } - - Query innerQuery = 
iq.asQuery(parentType); - - if (innerQuery == null) { - return null; - } - - innerQuery.setBoost(boost); - Query query = createParentQuery(innerQuery, parentType, score, parseContext, innerHits); - if (query == null) { - return null; - } - - query.setBoost(boost); - if (queryName != null) { - parseContext.addNamedQuery(queryName, query); - } - return query; + return new HasParentQueryBuilder(parentType, iqb, score, innerHits).queryName(queryName).boost(boost); } - static Query createParentQuery(Query innerQuery, String parentType, boolean score, QueryParseContext parseContext, InnerHitsSubSearchContext innerHits) throws IOException { - DocumentMapper parentDocMapper = parseContext.mapperService().documentMapper(parentType); - if (parentDocMapper == null) { - throw new ParsingException(parseContext, "[has_parent] query configured 'parent_type' [" + parentType - + "] is not a valid type"); - } - - if (innerHits != null) { - ParsedQuery parsedQuery = new ParsedQuery(innerQuery, parseContext.copyNamedQueries()); - InnerHitsContext.ParentChildInnerHits parentChildInnerHits = new InnerHitsContext.ParentChildInnerHits(innerHits.getSubSearchContext(), parsedQuery, null, parseContext.mapperService(), parentDocMapper); - String name = innerHits.getName() != null ? 
innerHits.getName() : parentType; - parseContext.addInnerHits(name, parentChildInnerHits); - } - - Set parentTypes = new HashSet<>(5); - parentTypes.add(parentDocMapper.type()); - ParentChildIndexFieldData parentChildIndexFieldData = null; - for (DocumentMapper documentMapper : parseContext.mapperService().docMappers(false)) { - ParentFieldMapper parentFieldMapper = documentMapper.parentFieldMapper(); - if (parentFieldMapper.active()) { - DocumentMapper parentTypeDocumentMapper = parseContext.mapperService().documentMapper(parentFieldMapper.type()); - parentChildIndexFieldData = parseContext.getForField(parentFieldMapper.fieldType()); - if (parentTypeDocumentMapper == null) { - // Only add this, if this parentFieldMapper (also a parent) isn't a child of another parent. - parentTypes.add(parentFieldMapper.type()); - } - } - } - if (parentChildIndexFieldData == null) { - throw new ParsingException(parseContext, "[has_parent] no _parent field configured"); - } - - Query parentTypeQuery = null; - if (parentTypes.size() == 1) { - DocumentMapper documentMapper = parseContext.mapperService().documentMapper(parentTypes.iterator().next()); - if (documentMapper != null) { - parentTypeQuery = documentMapper.typeFilter(); - } - } else { - BooleanQuery.Builder parentsFilter = new BooleanQuery.Builder(); - for (String parentTypeStr : parentTypes) { - DocumentMapper documentMapper = parseContext.mapperService().documentMapper(parentTypeStr); - if (documentMapper != null) { - parentsFilter.add(documentMapper.typeFilter(), BooleanClause.Occur.SHOULD); - } - } - parentTypeQuery = parentsFilter.build(); - } - - if (parentTypeQuery == null) { - return null; - } - - // wrap the query with type query - innerQuery = Queries.filtered(innerQuery, parentDocMapper.typeFilter()); - Query childrenFilter = Queries.not(parentTypeQuery); - ScoreMode scoreMode = score ? 
ScoreMode.Max : ScoreMode.None; - return joinUtilHelper(parentType, parentChildIndexFieldData, childrenFilter, scoreMode, innerQuery, 0, Integer.MAX_VALUE); + @Override + public HasParentQueryBuilder getBuilderPrototype() { + return PROTOTYPE; } - } diff --git a/core/src/main/java/org/elasticsearch/index/query/IdsQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/IdsQueryBuilder.java index 02c2a17eade..b85db4b66b1 100644 --- a/core/src/main/java/org/elasticsearch/index/query/IdsQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/IdsQueryBuilder.java @@ -19,44 +19,60 @@ package org.elasticsearch.index.query; +import org.apache.lucene.queries.TermsQuery; +import org.apache.lucene.search.Query; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.mapper.Uid; +import org.elasticsearch.index.mapper.internal.UidFieldMapper; import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.List; +import java.util.*; /** * A query that will return only documents matching specific ids (and a type). */ -public class IdsQueryBuilder extends QueryBuilder implements BoostableQueryBuilder { +public class IdsQueryBuilder extends AbstractQueryBuilder { - private final List types; + public static final String NAME = "ids"; - private List values = new ArrayList<>(); + private final Set ids = new HashSet<>(); - private float boost = -1; + private final String[] types; - private String queryName; + static final IdsQueryBuilder PROTOTYPE = new IdsQueryBuilder(); - public IdsQueryBuilder(String... types) { - this.types = types == null ? 
null : Arrays.asList(types); + /** + * Creates a new IdsQueryBuilder by optionally providing the types of the documents to look for + */ + public IdsQueryBuilder(@Nullable String... types) { + this.types = types; } /** - * Adds ids to the filter. + * Returns the types used in this query + */ + public String[] types() { + return this.types; + } + + /** + * Adds ids to the query. */ public IdsQueryBuilder addIds(String... ids) { - values.addAll(Arrays.asList(ids)); + Collections.addAll(this.ids, ids); return this; } /** - * Adds ids to the filter. + * Adds ids to the query. */ public IdsQueryBuilder addIds(Collection ids) { - values.addAll(ids); + this.ids.addAll(ids); return this; } @@ -75,48 +91,78 @@ public class IdsQueryBuilder extends QueryBuilder implements BoostableQueryBuild } /** - * Sets the boost for this query. Documents matching this query will (in addition to the normal - * weightings) have their score multiplied by the boost provided. + * Returns the ids for the query. */ - @Override - public IdsQueryBuilder boost(float boost) { - this.boost = boost; - return this; - } - - /** - * Sets the query name for the filter that can be used when searching for matched_filters per hit. 
- */ - public IdsQueryBuilder queryName(String queryName) { - this.queryName = queryName; - return this; + public Set ids() { + return this.ids; } @Override protected void doXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(IdsQueryParser.NAME); + builder.startObject(NAME); if (types != null) { - if (types.size() == 1) { - builder.field("type", types.get(0)); + if (types.length == 1) { + builder.field("type", types[0]); } else { - builder.startArray("types"); - for (Object type : types) { - builder.value(type); - } - builder.endArray(); + builder.array("types", types); } } builder.startArray("values"); - for (Object value : values) { + for (String value : ids) { builder.value(value); } builder.endArray(); - if (boost != -1) { - builder.field("boost", boost); - } - if (queryName != null) { - builder.field("_name", queryName); - } + printBoostAndQueryName(builder); builder.endObject(); } -} \ No newline at end of file + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + protected Query doToQuery(QueryShardContext context) throws IOException { + Query query; + if (this.ids.isEmpty()) { + query = Queries.newMatchNoDocsQuery(); + } else { + Collection typesForQuery; + if (types == null || types.length == 0) { + typesForQuery = context.queryTypes(); + } else if (types.length == 1 && MetaData.ALL.equals(types[0])) { + typesForQuery = context.mapperService().types(); + } else { + typesForQuery = new HashSet<>(); + Collections.addAll(typesForQuery, types); + } + + query = new TermsQuery(UidFieldMapper.NAME, Uid.createUidsForTypesAndIds(typesForQuery, ids)); + } + return query; + } + + @Override + protected IdsQueryBuilder doReadFrom(StreamInput in) throws IOException { + IdsQueryBuilder idsQueryBuilder = new IdsQueryBuilder(in.readStringArray()); + idsQueryBuilder.addIds(in.readStringArray()); + return idsQueryBuilder; + } + + @Override + protected void doWriteTo(StreamOutput out) throws IOException 
{ + out.writeStringArray(types); + out.writeStringArray(ids.toArray(new String[ids.size()])); + } + + @Override + protected int doHashCode() { + return Objects.hash(ids, Arrays.hashCode(types)); + } + + @Override + protected boolean doEquals(IdsQueryBuilder other) { + return Objects.equals(ids, other.ids) && + Arrays.equals(types, other.types); + } +} diff --git a/core/src/main/java/org/elasticsearch/index/query/IdsQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/IdsQueryParser.java index 37c8053caf9..0ffd31644e5 100644 --- a/core/src/main/java/org/elasticsearch/index/query/IdsQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/IdsQueryParser.java @@ -19,48 +19,36 @@ package org.elasticsearch.index.query; -import org.apache.lucene.queries.TermsQuery; -import org.apache.lucene.search.Query; -import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.ParsingException; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.lucene.search.Queries; -import org.elasticsearch.common.util.iterable.Iterables; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.mapper.Uid; -import org.elasticsearch.index.mapper.internal.UidFieldMapper; import java.io.IOException; import java.util.ArrayList; -import java.util.Collection; import java.util.Collections; import java.util.List; /** - * + * Parser for ids query */ -public class IdsQueryParser implements QueryParser { - - public static final String NAME = "ids"; - - @Inject - public IdsQueryParser() { - } +public class IdsQueryParser implements QueryParser { @Override public String[] names() { - return new String[]{NAME}; + return new String[]{IdsQueryBuilder.NAME}; } + /** + * @return a QueryBuilder representation of the query passed in as XContent in the parse context + */ @Override - public Query parse(QueryParseContext parseContext) throws IOException, ParsingException { + public IdsQueryBuilder 
fromXContent(QueryParseContext parseContext) throws IOException { XContentParser parser = parseContext.parser(); - - List ids = new ArrayList<>(); - Collection types = null; - String currentFieldName = null; - float boost = 1.0f; + List ids = new ArrayList<>(); + List types = new ArrayList<>(); + float boost = AbstractQueryBuilder.DEFAULT_BOOST; String queryName = null; + + String currentFieldName = null; XContentParser.Token token; boolean idsProvided = false; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { @@ -72,27 +60,26 @@ public class IdsQueryParser implements QueryParser { while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { if ((token == XContentParser.Token.VALUE_STRING) || (token == XContentParser.Token.VALUE_NUMBER)) { - BytesRef value = parser.utf8BytesOrNull(); - if (value == null) { - throw new ParsingException(parseContext, "No value specified for term filter"); + String id = parser.textOrNull(); + if (id == null) { + throw new ParsingException(parser.getTokenLocation(), "No value specified for term filter"); } - ids.add(value); + ids.add(id); } else { - throw new ParsingException(parseContext, "Illegal value for id, expecting a string or number, got: " + throw new ParsingException(parser.getTokenLocation(), "Illegal value for id, expecting a string or number, got: " + token); } } } else if ("types".equals(currentFieldName) || "type".equals(currentFieldName)) { - types = new ArrayList<>(); while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { String value = parser.textOrNull(); if (value == null) { - throw new ParsingException(parseContext, "No type specified for term filter"); + throw new ParsingException(parser.getTokenLocation(), "No type specified for term filter"); } types.add(value); } } else { - throw new ParsingException(parseContext, "[ids] query does not support [" + currentFieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), "[ids] query does not support 
[" + currentFieldName + "]"); } } else if (token.isValue()) { if ("type".equals(currentFieldName) || "_type".equals(currentFieldName)) { @@ -102,30 +89,22 @@ public class IdsQueryParser implements QueryParser { } else if ("_name".equals(currentFieldName)) { queryName = parser.text(); } else { - throw new ParsingException(parseContext, "[ids] query does not support [" + currentFieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), "[ids] query does not support [" + currentFieldName + "]"); } } } - if (!idsProvided) { - throw new ParsingException(parseContext, "[ids] query, no ids values provided"); + throw new ParsingException(parser.getTokenLocation(), "[ids] query, no ids values provided"); } - if (ids.isEmpty()) { - return Queries.newMatchNoDocsQuery(); - } - - if (types == null || types.isEmpty()) { - types = parseContext.queryTypes(); - } else if (types.size() == 1 && Iterables.getFirst(types, null).equals("_all")) { - types = parseContext.mapperService().types(); - } - - TermsQuery query = new TermsQuery(UidFieldMapper.NAME, Uid.createUidsForTypesAndIds(types, ids)); - query.setBoost(boost); - if (queryName != null) { - parseContext.addNamedQuery(queryName, query); - } + IdsQueryBuilder query = new IdsQueryBuilder(types.toArray(new String[types.size()])); + query.addIds(ids.toArray(new String[ids.size()])); + query.boost(boost).queryName(queryName); return query; } + + @Override + public IdsQueryBuilder getBuilderPrototype() { + return IdsQueryBuilder.PROTOTYPE; + } } diff --git a/core/src/main/java/org/elasticsearch/index/query/IndexQueryParserService.java b/core/src/main/java/org/elasticsearch/index/query/IndexQueryParserService.java index dd8abbdb299..d4f7491fb11 100644 --- a/core/src/main/java/org/elasticsearch/index/query/IndexQueryParserService.java +++ b/core/src/main/java/org/elasticsearch/index/query/IndexQueryParserService.java @@ -22,12 +22,17 @@ package org.elasticsearch.index.query; import org.apache.lucene.search.Query; import 
org.apache.lucene.util.CloseableThreadLocal; import org.elasticsearch.Version; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.lucene.search.Queries; +import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentHelper; @@ -40,6 +45,7 @@ import org.elasticsearch.index.cache.bitset.BitsetFilterCache; import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.internal.AllFieldMapper; +import org.elasticsearch.index.query.support.InnerHitsQueryParserHelper; import org.elasticsearch.index.settings.IndexSettings; import org.elasticsearch.index.similarity.SimilarityService; import org.elasticsearch.indices.query.IndicesQueriesRegistry; @@ -51,13 +57,16 @@ public class IndexQueryParserService extends AbstractIndexComponent { public static final String DEFAULT_FIELD = "index.query.default_field"; public static final String QUERY_STRING_LENIENT = "index.query_string.lenient"; + public static final String QUERY_STRING_ANALYZE_WILDCARD = "indices.query.query_string.analyze_wildcard"; + public static final String QUERY_STRING_ALLOW_LEADING_WILDCARD = "indices.query.query_string.allowLeadingWildcard"; public static final String PARSE_STRICT = "index.query.parse.strict"; public static final String ALLOW_UNMAPPED = "index.query.parse.allow_unmapped_fields"; + private final InnerHitsQueryParserHelper 
innerHitsQueryParserHelper; - private CloseableThreadLocal cache = new CloseableThreadLocal() { + private CloseableThreadLocal cache = new CloseableThreadLocal() { @Override - protected QueryParseContext initialValue() { - return new QueryParseContext(index, IndexQueryParserService.this); + protected QueryShardContext initialValue() { + return new QueryShardContext(index, IndexQueryParserService.this); } }; @@ -71,24 +80,33 @@ public class IndexQueryParserService extends AbstractIndexComponent { final IndexCache indexCache; - final IndexFieldDataService fieldDataService; + protected IndexFieldDataService fieldDataService; + + final ClusterService clusterService; + + final IndexNameExpressionResolver indexNameExpressionResolver; final BitsetFilterCache bitsetFilterCache; private final IndicesQueriesRegistry indicesQueriesRegistry; - private String defaultField; - private boolean queryStringLenient; + private final String defaultField; + private final boolean queryStringLenient; + private final boolean queryStringAnalyzeWildcard; + private final boolean queryStringAllowLeadingWildcard; private final ParseFieldMatcher parseFieldMatcher; private final boolean defaultAllowUnmappedFields; + private final Client client; @Inject - public IndexQueryParserService(Index index, @IndexSettings Settings indexSettings, + public IndexQueryParserService(Index index, @IndexSettings Settings indexSettings, Settings settings, IndicesQueriesRegistry indicesQueriesRegistry, ScriptService scriptService, AnalysisService analysisService, MapperService mapperService, IndexCache indexCache, IndexFieldDataService fieldDataService, BitsetFilterCache bitsetFilterCache, - @Nullable SimilarityService similarityService) { + @Nullable SimilarityService similarityService, ClusterService clusterService, + IndexNameExpressionResolver indexNameExpressionResolver, + InnerHitsQueryParserHelper innerHitsQueryParserHelper, Client client) { super(index, indexSettings); this.scriptService = scriptService; 
this.analysisService = analysisService; @@ -97,12 +115,18 @@ public class IndexQueryParserService extends AbstractIndexComponent { this.indexCache = indexCache; this.fieldDataService = fieldDataService; this.bitsetFilterCache = bitsetFilterCache; + this.clusterService = clusterService; + this.indexNameExpressionResolver = indexNameExpressionResolver; this.defaultField = indexSettings.get(DEFAULT_FIELD, AllFieldMapper.NAME); this.queryStringLenient = indexSettings.getAsBoolean(QUERY_STRING_LENIENT, false); + this.queryStringAnalyzeWildcard = settings.getAsBoolean(QUERY_STRING_ANALYZE_WILDCARD, false); + this.queryStringAllowLeadingWildcard = settings.getAsBoolean(QUERY_STRING_ALLOW_LEADING_WILDCARD, true); this.parseFieldMatcher = new ParseFieldMatcher(indexSettings); this.defaultAllowUnmappedFields = indexSettings.getAsBoolean(ALLOW_UNMAPPED, true); this.indicesQueriesRegistry = indicesQueriesRegistry; + this.innerHitsQueryParserHelper = innerHitsQueryParserHelper; + this.client = client; } public void close() { @@ -113,56 +137,24 @@ public class IndexQueryParserService extends AbstractIndexComponent { return this.defaultField; } + public boolean queryStringAnalyzeWildcard() { + return this.queryStringAnalyzeWildcard; + } + + public boolean queryStringAllowLeadingWildcard() { + return this.queryStringAllowLeadingWildcard; + } + public boolean queryStringLenient() { return this.queryStringLenient; } - public QueryParser queryParser(String name) { - return indicesQueriesRegistry.queryParsers().get(name); - } - - public ParsedQuery parse(QueryBuilder queryBuilder) { - XContentParser parser = null; - try { - BytesReference bytes = queryBuilder.buildAsBytes(); - parser = XContentFactory.xContent(bytes).createParser(bytes); - return parse(cache.get(), parser); - } catch (ParsingException e) { - throw e; - } catch (Exception e) { - throw new ParsingException(getParseContext(), "Failed to parse", e); - } finally { - if (parser != null) { - parser.close(); - } - } - } - - 
public ParsedQuery parse(byte[] source) { - return parse(source, 0, source.length); - } - - public ParsedQuery parse(byte[] source, int offset, int length) { - XContentParser parser = null; - try { - parser = XContentFactory.xContent(source, offset, length).createParser(source, offset, length); - return parse(cache.get(), parser); - } catch (ParsingException e) { - throw e; - } catch (Exception e) { - throw new ParsingException(getParseContext(), "Failed to parse", e); - } finally { - if (parser != null) { - parser.close(); - } - } + IndicesQueriesRegistry indicesQueriesRegistry() { + return indicesQueriesRegistry; } public ParsedQuery parse(BytesReference source) { - return parse(cache.get(), source); - } - - public ParsedQuery parse(QueryParseContext context, BytesReference source) { + QueryShardContext context = cache.get(); XContentParser parser = null; try { parser = XContentFactory.xContent(source).createParser(source); @@ -170,23 +162,7 @@ public class IndexQueryParserService extends AbstractIndexComponent { } catch (ParsingException e) { throw e; } catch (Exception e) { - throw new ParsingException(context, "Failed to parse", e); - } finally { - if (parser != null) { - parser.close(); - } - } - } - - public ParsedQuery parse(String source) throws ParsingException { - XContentParser parser = null; - try { - parser = XContentFactory.xContent(source).createParser(source); - return innerParse(cache.get(), parser); - } catch (ParsingException e) { - throw e; - } catch (Exception e) { - throw new ParsingException(getParseContext(), "Failed to parse [" + source + "]", e); + throw new ParsingException(parser == null ? 
null : parser.getTokenLocation(), "Failed to parse", e); } finally { if (parser != null) { parser.close(); @@ -195,14 +171,10 @@ public class IndexQueryParserService extends AbstractIndexComponent { } public ParsedQuery parse(XContentParser parser) { - return parse(cache.get(), parser); - } - - public ParsedQuery parse(QueryParseContext context, XContentParser parser) { try { - return innerParse(context, parser); - } catch (IOException e) { - throw new ParsingException(context, "Failed to parse", e); + return innerParse(cache.get(), parser); + } catch(IOException e) { + throw new ParsingException(parser.getTokenLocation(), "Failed to parse", e); } } @@ -211,10 +183,11 @@ public class IndexQueryParserService extends AbstractIndexComponent { */ @Nullable public ParsedQuery parseInnerFilter(XContentParser parser) throws IOException { - QueryParseContext context = cache.get(); + QueryShardContext context = cache.get(); context.reset(parser); try { - Query filter = context.parseInnerFilter(); + context.parseFieldMatcher(parseFieldMatcher); + Query filter = context.parseContext().parseInnerQueryBuilder().toFilter(context); if (filter == null) { return null; } @@ -225,27 +198,15 @@ public class IndexQueryParserService extends AbstractIndexComponent { } @Nullable - public Query parseInnerQuery(XContentParser parser) throws IOException { - QueryParseContext context = cache.get(); - context.reset(parser); - try { - return context.parseInnerQuery(); - } finally { - context.reset(null); - } - } - - @Nullable - public Query parseInnerQuery(QueryParseContext parseContext) throws IOException { - parseContext.parseFieldMatcher(parseFieldMatcher); - Query query = parseContext.parseInnerQuery(); + public Query parseInnerQuery(QueryShardContext context) throws IOException { + Query query = context.parseContext().parseInnerQueryBuilder().toQuery(context); if (query == null) { query = Queries.newMatchNoDocsQuery(); } return query; } - public QueryParseContext getParseContext() { + 
public QueryShardContext getShardContext() { return cache.get(); } @@ -264,9 +225,10 @@ public class IndexQueryParserService extends AbstractIndexComponent { * Selectively parses a query from a top level query or query_binary json field from the specified source. */ public ParsedQuery parseQuery(BytesReference source) { + XContentParser parser = null; try { + parser = XContentHelper.createParser(source); ParsedQuery parsedQuery = null; - XContentParser parser = XContentHelper.createParser(source); for (XContentParser.Token token = parser.nextToken(); token != XContentParser.Token.END_OBJECT; token = parser.nextToken()) { if (token == XContentParser.Token.FIELD_NAME) { String fieldName = parser.currentName(); @@ -277,37 +239,54 @@ public class IndexQueryParserService extends AbstractIndexComponent { XContentParser qSourceParser = XContentFactory.xContent(querySource).createParser(querySource); parsedQuery = parse(qSourceParser); } else { - throw new ParsingException(getParseContext(), "request does not support [" + fieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), "request does not support [" + fieldName + "]"); } } } - if (parsedQuery != null) { - return parsedQuery; + if (parsedQuery == null) { + throw new ParsingException(parser.getTokenLocation(), "Required query is missing"); } - } catch (ParsingException e) { + return parsedQuery; + } catch (ParsingException | QueryShardException e) { throw e; } catch (Throwable e) { - throw new ParsingException(getParseContext(), "Failed to parse", e); + throw new ParsingException(parser == null ? 
null : parser.getTokenLocation(), "Failed to parse", e); } - - throw new ParsingException(getParseContext(), "Required query is missing"); } - private ParsedQuery innerParse(QueryParseContext parseContext, XContentParser parser) throws IOException, ParsingException { - parseContext.reset(parser); + private ParsedQuery innerParse(QueryShardContext context, XContentParser parser) throws IOException, QueryShardException { + context.reset(parser); try { - parseContext.parseFieldMatcher(parseFieldMatcher); - Query query = parseContext.parseInnerQuery(); + context.parseFieldMatcher(parseFieldMatcher); + Query query = context.parseContext().parseInnerQueryBuilder().toQuery(context); if (query == null) { query = Queries.newMatchNoDocsQuery(); } - return new ParsedQuery(query, parseContext.copyNamedQueries()); + return new ParsedQuery(query, context.copyNamedQueries()); } finally { - parseContext.reset(null); + context.reset(null); } } public ParseFieldMatcher parseFieldMatcher() { return parseFieldMatcher; } + + public boolean matchesIndices(String... 
indices) { + final String[] concreteIndices = indexNameExpressionResolver.concreteIndices(clusterService.state(), IndicesOptions.lenientExpandOpen(), indices); + for (String index : concreteIndices) { + if (Regex.simpleMatch(index, this.index.name())) { + return true; + } + } + return false; + } + + public InnerHitsQueryParserHelper getInnerHitsQueryParserHelper() { + return innerHitsQueryParserHelper; + } + + public Client getClient() { + return client; + } } diff --git a/core/src/main/java/org/elasticsearch/index/query/IndicesQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/IndicesQueryBuilder.java index 7c2af81b268..b4c7b53a99e 100644 --- a/core/src/main/java/org/elasticsearch/index/query/IndicesQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/IndicesQueryBuilder.java @@ -19,69 +19,133 @@ package org.elasticsearch.index.query; +import org.apache.lucene.search.Query; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import java.io.IOException; +import java.util.Arrays; +import java.util.Objects; /** * A query that will execute the wrapped query only for the specified indices, and "match_all" when * it does not match those indices (by default). */ -public class IndicesQueryBuilder extends QueryBuilder { +public class IndicesQueryBuilder extends AbstractQueryBuilder { - private final QueryBuilder queryBuilder; + public static final String NAME = "indices"; + + private final QueryBuilder innerQuery; private final String[] indices; - private String sNoMatchQuery; - private QueryBuilder noMatchQuery; + private QueryBuilder noMatchQuery = defaultNoMatchQuery(); - private String queryName; + static final IndicesQueryBuilder PROTOTYPE = new IndicesQueryBuilder(EmptyQueryBuilder.PROTOTYPE, "index"); - public IndicesQueryBuilder(QueryBuilder queryBuilder, String... 
indices) { - this.queryBuilder = queryBuilder; + public IndicesQueryBuilder(QueryBuilder innerQuery, String... indices) { + if (innerQuery == null) { + throw new IllegalArgumentException("inner query cannot be null"); + } + if (indices == null || indices.length == 0) { + throw new IllegalArgumentException("list of indices cannot be null or empty"); + } + this.innerQuery = Objects.requireNonNull(innerQuery); this.indices = indices; } - /** - * Sets the no match query, can either be all or none. - */ - public IndicesQueryBuilder noMatchQuery(String type) { - this.sNoMatchQuery = type; - return this; + public QueryBuilder innerQuery() { + return this.innerQuery; + } + + public String[] indices() { + return this.indices; } /** * Sets the query to use when it executes on an index that does not match the indices provided. */ public IndicesQueryBuilder noMatchQuery(QueryBuilder noMatchQuery) { + if (noMatchQuery == null) { + throw new IllegalArgumentException("noMatch query cannot be null"); + } this.noMatchQuery = noMatchQuery; return this; } /** - * Sets the query name for the filter that can be used when searching for matched_filters per hit. + * Sets the no match query, can either be all or none. 
*/ - public IndicesQueryBuilder queryName(String queryName) { - this.queryName = queryName; + public IndicesQueryBuilder noMatchQuery(String type) { + this.noMatchQuery = IndicesQueryParser.parseNoMatchQuery(type); return this; } + public QueryBuilder noMatchQuery() { + return this.noMatchQuery; + } + + static QueryBuilder defaultNoMatchQuery() { + return QueryBuilders.matchAllQuery(); + } + @Override protected void doXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(IndicesQueryParser.NAME); + builder.startObject(NAME); builder.field("indices", indices); builder.field("query"); - queryBuilder.toXContent(builder, params); - if (noMatchQuery != null) { - builder.field("no_match_query"); - noMatchQuery.toXContent(builder, params); - } else if (sNoMatchQuery != null) { - builder.field("no_match_query", sNoMatchQuery); - } - if (queryName != null) { - builder.field("_name", queryName); - } + innerQuery.toXContent(builder, params); + builder.field("no_match_query"); + noMatchQuery.toXContent(builder, params); + printBoostAndQueryName(builder); builder.endObject(); } -} \ No newline at end of file + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + protected Query doToQuery(QueryShardContext context) throws IOException { + if (context.matchesIndices(indices)) { + return innerQuery.toQuery(context); + } + return noMatchQuery.toQuery(context); + } + + @Override + protected void setFinalBoost(Query query) { + if (boost != DEFAULT_BOOST) { + //if both the wrapped query and the wrapper hold a boost, the main one coming from the wrapper wins + query.setBoost(boost); + } + } + + @Override + protected IndicesQueryBuilder doReadFrom(StreamInput in) throws IOException { + IndicesQueryBuilder indicesQueryBuilder = new IndicesQueryBuilder(in.readQuery(), in.readStringArray()); + indicesQueryBuilder.noMatchQuery = in.readQuery(); + return indicesQueryBuilder; + } + + @Override + protected void 
doWriteTo(StreamOutput out) throws IOException { + out.writeQuery(innerQuery); + out.writeStringArray(indices); + out.writeQuery(noMatchQuery); + } + + @Override + public int doHashCode() { + return Objects.hash(innerQuery, noMatchQuery, Arrays.hashCode(indices)); + } + + @Override + protected boolean doEquals(IndicesQueryBuilder other) { + return Objects.equals(innerQuery, other.innerQuery) && + Arrays.equals(indices, other.indices) && // otherwise we are comparing pointers + Objects.equals(noMatchQuery, other.noMatchQuery); + } +} diff --git a/core/src/main/java/org/elasticsearch/index/query/IndicesQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/IndicesQueryParser.java index b0a23b41c52..674cad70872 100644 --- a/core/src/main/java/org/elasticsearch/index/query/IndicesQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/IndicesQueryParser.java @@ -19,147 +19,107 @@ package org.elasticsearch.index.query; -import org.apache.lucene.search.Query; -import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.cluster.ClusterService; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.lucene.search.Queries; -import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.query.support.XContentStructure; import java.io.IOException; import java.util.ArrayList; import java.util.Collection; /** + * Parser for {@link IndicesQueryBuilder}. 
*/ public class IndicesQueryParser implements QueryParser { - public static final String NAME = "indices"; private static final ParseField QUERY_FIELD = new ParseField("query", "filter"); private static final ParseField NO_MATCH_QUERY = new ParseField("no_match_query", "no_match_filter"); - @Nullable - private final ClusterService clusterService; - private final IndexNameExpressionResolver indexNameExpressionResolver; - - @Inject - public IndicesQueryParser(@Nullable ClusterService clusterService, IndexNameExpressionResolver indexNameExpressionResolver) { - this.clusterService = clusterService; - this.indexNameExpressionResolver = indexNameExpressionResolver; - } - @Override public String[] names() { - return new String[]{NAME}; + return new String[]{IndicesQueryBuilder.NAME}; } @Override - public Query parse(QueryParseContext parseContext) throws IOException, ParsingException { + public QueryBuilder fromXContent(QueryParseContext parseContext) throws IOException, ParsingException { XContentParser parser = parseContext.parser(); - Query noMatchQuery = null; - boolean queryFound = false; - boolean indicesFound = false; - boolean currentIndexMatchesIndices = false; + QueryBuilder innerQuery = null; + Collection indices = new ArrayList<>(); + QueryBuilder noMatchQuery = IndicesQueryBuilder.defaultNoMatchQuery(); + String queryName = null; + float boost = AbstractQueryBuilder.DEFAULT_BOOST; String currentFieldName = null; XContentParser.Token token; - XContentStructure.InnerQuery innerQuery = null; - XContentStructure.InnerQuery innerNoMatchQuery = null; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); } else if (token == XContentParser.Token.START_OBJECT) { if (parseContext.parseFieldMatcher().match(currentFieldName, QUERY_FIELD)) { - innerQuery = new XContentStructure.InnerQuery(parseContext, (String[])null); - queryFound = true; + innerQuery = 
parseContext.parseInnerQueryBuilder(); } else if (parseContext.parseFieldMatcher().match(currentFieldName, NO_MATCH_QUERY)) { - innerNoMatchQuery = new XContentStructure.InnerQuery(parseContext, (String[])null); + noMatchQuery = parseContext.parseInnerQueryBuilder(); } else { - throw new ParsingException(parseContext, "[indices] query does not support [" + currentFieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), "[indices] query does not support [" + currentFieldName + "]"); } } else if (token == XContentParser.Token.START_ARRAY) { if ("indices".equals(currentFieldName)) { - if (indicesFound) { - throw new ParsingException(parseContext, "[indices] indices or index already specified"); + if (indices.isEmpty() == false) { + throw new ParsingException(parser.getTokenLocation(), "[indices] indices or index already specified"); } - indicesFound = true; - Collection indices = new ArrayList<>(); while (parser.nextToken() != XContentParser.Token.END_ARRAY) { String value = parser.textOrNull(); if (value == null) { - throw new ParsingException(parseContext, "[indices] no value specified for 'indices' entry"); + throw new ParsingException(parser.getTokenLocation(), "[indices] no value specified for 'indices' entry"); } indices.add(value); } - currentIndexMatchesIndices = matchesIndices(parseContext.index().name(), indices.toArray(new String[indices.size()])); } else { - throw new ParsingException(parseContext, "[indices] query does not support [" + currentFieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), "[indices] query does not support [" + currentFieldName + "]"); } } else if (token.isValue()) { if ("index".equals(currentFieldName)) { - if (indicesFound) { - throw new ParsingException(parseContext, "[indices] indices or index already specified"); + if (indices.isEmpty() == false) { + throw new ParsingException(parser.getTokenLocation(), "[indices] indices or index already specified"); } - indicesFound = true; - 
currentIndexMatchesIndices = matchesIndices(parseContext.index().name(), parser.text()); + indices.add(parser.text()); } else if (parseContext.parseFieldMatcher().match(currentFieldName, NO_MATCH_QUERY)) { - String type = parser.text(); - if ("all".equals(type)) { - noMatchQuery = Queries.newMatchAllQuery(); - } else if ("none".equals(type)) { - noMatchQuery = Queries.newMatchNoDocsQuery(); - } + noMatchQuery = parseNoMatchQuery(parser.text()); } else if ("_name".equals(currentFieldName)) { queryName = parser.text(); + } else if ("boost".equals(currentFieldName)) { + boost = parser.floatValue(); } else { - throw new ParsingException(parseContext, "[indices] query does not support [" + currentFieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), "[indices] query does not support [" + currentFieldName + "]"); } } } - if (!queryFound) { - throw new ParsingException(parseContext, "[indices] requires 'query' element"); - } - if (!indicesFound) { - throw new ParsingException(parseContext, "[indices] requires 'indices' or 'index' element"); - } - Query chosenQuery; - if (currentIndexMatchesIndices) { - chosenQuery = innerQuery.asQuery(); - } else { - // If noMatchQuery is set, it means "no_match_query" was "all" or "none" - if (noMatchQuery != null) { - chosenQuery = noMatchQuery; - } else { - // There might be no "no_match_query" set, so default to the match_all if not set - if (innerNoMatchQuery == null) { - chosenQuery = Queries.newMatchAllQuery(); - } else { - chosenQuery = innerNoMatchQuery.asQuery(); - } - } + if (innerQuery == null) { + throw new ParsingException(parser.getTokenLocation(), "[indices] requires 'query' element"); } - if (queryName != null) { - parseContext.addNamedQuery(queryName, chosenQuery); + if (indices.isEmpty()) { + throw new ParsingException(parser.getTokenLocation(), "[indices] requires 'indices' or 'index' element"); } - return chosenQuery; + return new IndicesQueryBuilder(innerQuery, indices.toArray(new 
String[indices.size()])) + .noMatchQuery(noMatchQuery) + .boost(boost) + .queryName(queryName); } - protected boolean matchesIndices(String currentIndex, String... indices) { - final String[] concreteIndices = indexNameExpressionResolver.concreteIndices(clusterService.state(), IndicesOptions.lenientExpandOpen(), indices); - for (String index : concreteIndices) { - if (Regex.simpleMatch(index, currentIndex)) { - return true; - } + static QueryBuilder parseNoMatchQuery(String type) { + if ("all".equals(type)) { + return QueryBuilders.matchAllQuery(); + } else if ("none".equals(type)) { + return new MatchNoneQueryBuilder(); } - return false; + throw new IllegalArgumentException("query type can only be [all] or [none] but not " + "[" + type + "]"); + } + + @Override + public IndicesQueryBuilder getBuilderPrototype() { + return IndicesQueryBuilder.PROTOTYPE; } } diff --git a/core/src/main/java/org/elasticsearch/index/query/MatchAllQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/MatchAllQueryBuilder.java index b09bc9f1dc9..934d32f7165 100644 --- a/core/src/main/java/org/elasticsearch/index/query/MatchAllQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/MatchAllQueryBuilder.java @@ -19,6 +19,10 @@ package org.elasticsearch.index.query; +import org.apache.lucene.search.Query; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.xcontent.XContentBuilder; import java.io.IOException; @@ -26,26 +30,46 @@ import java.io.IOException; /** * A query that matches on all documents. */ -public class MatchAllQueryBuilder extends QueryBuilder implements BoostableQueryBuilder { +public class MatchAllQueryBuilder extends AbstractQueryBuilder { - private float boost = -1; + public static final String NAME = "match_all"; - /** - * Sets the boost for this query. 
Documents matching this query will (in addition to the normal - * weightings) have their score multiplied by the boost provided. - */ - @Override - public MatchAllQueryBuilder boost(float boost) { - this.boost = boost; - return this; - } + static final MatchAllQueryBuilder PROTOTYPE = new MatchAllQueryBuilder(); @Override - public void doXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(MatchAllQueryParser.NAME); - if (boost != -1) { - builder.field("boost", boost); - } + protected void doXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(NAME); + printBoostAndQueryName(builder); builder.endObject(); } -} \ No newline at end of file + + @Override + protected Query doToQuery(QueryShardContext context) throws IOException { + return Queries.newMatchAllQuery(); + } + + @Override + protected boolean doEquals(MatchAllQueryBuilder other) { + return true; + } + + @Override + protected int doHashCode() { + return 0; + } + + @Override + protected MatchAllQueryBuilder doReadFrom(StreamInput in) throws IOException { + return new MatchAllQueryBuilder(); + } + + @Override + protected void doWriteTo(StreamOutput out) throws IOException { + //nothing to write really + } + + @Override + public String getWriteableName() { + return NAME; + } +} diff --git a/core/src/main/java/org/elasticsearch/index/query/MatchAllQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/MatchAllQueryParser.java index 8582f543238..770f9c3dd67 100644 --- a/core/src/main/java/org/elasticsearch/index/query/MatchAllQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/MatchAllQueryParser.java @@ -19,58 +19,51 @@ package org.elasticsearch.index.query; -import org.apache.lucene.search.MatchAllDocsQuery; -import org.apache.lucene.search.Query; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.inject.Inject; -import 
org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; /** - * + * Parser for match_all query */ -public class MatchAllQueryParser implements QueryParser { - - public static final String NAME = "match_all"; - - @Inject - public MatchAllQueryParser() { - } +public class MatchAllQueryParser implements QueryParser { @Override public String[] names() { - return new String[]{NAME, Strings.toCamelCase(NAME)}; + return new String[]{MatchAllQueryBuilder.NAME, Strings.toCamelCase(MatchAllQueryBuilder.NAME)}; } @Override - public Query parse(QueryParseContext parseContext) throws IOException, ParsingException { + public MatchAllQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException { XContentParser parser = parseContext.parser(); - float boost = 1.0f; String currentFieldName = null; - XContentParser.Token token; + String queryName = null; + float boost = AbstractQueryBuilder.DEFAULT_BOOST; while (((token = parser.nextToken()) != XContentParser.Token.END_OBJECT && token != XContentParser.Token.END_ARRAY)) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); } else if (token.isValue()) { - if ("boost".equals(currentFieldName)) { + if ("_name".equals(currentFieldName)) { + queryName = parser.text(); + } else if ("boost".equals(currentFieldName)) { boost = parser.floatValue(); } else { - throw new ParsingException(parseContext, "[match_all] query does not support [" + currentFieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), "[match_all] query does not support [" + currentFieldName + "]"); } } } - - if (boost == 1.0f) { - return Queries.newMatchAllQuery(); - } - - MatchAllDocsQuery query = new MatchAllDocsQuery(); - query.setBoost(boost); - return query; + MatchAllQueryBuilder queryBuilder = new MatchAllQueryBuilder(); + queryBuilder.boost(boost); + queryBuilder.queryName(queryName); + return queryBuilder; } -} \ No 
newline at end of file + + @Override + public MatchAllQueryBuilder getBuilderPrototype() { + return MatchAllQueryBuilder.PROTOTYPE; + } +} diff --git a/core/src/main/java/org/elasticsearch/index/query/MatchNoneQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/MatchNoneQueryBuilder.java new file mode 100644 index 00000000000..0c466a43d10 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/index/query/MatchNoneQueryBuilder.java @@ -0,0 +1,79 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.query; + +import org.apache.lucene.search.Query; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.lucene.search.Queries; +import org.elasticsearch.common.xcontent.XContentBuilder; + +import java.io.IOException; + +/** + * A query that matches no document. 
+ */ +public class MatchNoneQueryBuilder extends AbstractQueryBuilder { + + public static final String NAME = "match_none"; + + public static final MatchNoneQueryBuilder PROTOTYPE = new MatchNoneQueryBuilder(); + + @Override + protected void doXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(NAME); + builder.endObject(); + } + + @Override + protected Query doToQuery(QueryShardContext context) throws IOException { + return Queries.newMatchNoDocsQuery(); + } + + @Override + protected void setFinalBoost(Query query) { + //no-op this query doesn't support boost + } + + @Override + protected boolean doEquals(MatchNoneQueryBuilder other) { + return true; + } + + @Override + protected int doHashCode() { + return 0; + } + + @Override + protected MatchNoneQueryBuilder doReadFrom(StreamInput in) throws IOException { + return new MatchNoneQueryBuilder(); + } + + @Override + protected void doWriteTo(StreamOutput out) throws IOException { + //nothing to write really + } + + @Override + public String getWriteableName() { + return NAME; + } +} diff --git a/core/src/main/java/org/elasticsearch/index/query/MatchNoneQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/MatchNoneQueryParser.java new file mode 100644 index 00000000000..7135836271f --- /dev/null +++ b/core/src/main/java/org/elasticsearch/index/query/MatchNoneQueryParser.java @@ -0,0 +1,51 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.query; + +import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; + +public class MatchNoneQueryParser implements QueryParser { + + @Override + public String[] names() { + return new String[]{MatchNoneQueryBuilder.NAME, Strings.toCamelCase(MatchNoneQueryBuilder.NAME)}; + } + + @Override + public MatchNoneQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException { + XContentParser parser = parseContext.parser(); + + XContentParser.Token token = parser.nextToken(); + if (token != XContentParser.Token.END_OBJECT) { + throw new ParsingException(parser.getTokenLocation(), "[match_none] query malformed"); + } + + return new MatchNoneQueryBuilder(); + } + + @Override + public MatchNoneQueryBuilder getBuilderPrototype() { + return MatchNoneQueryBuilder.PROTOTYPE; + } +} diff --git a/core/src/main/java/org/elasticsearch/index/query/MatchQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/MatchQueryBuilder.java index c7c530b1d5d..e959dbee88c 100644 --- a/core/src/main/java/org/elasticsearch/index/query/MatchQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/MatchQueryBuilder.java @@ -19,97 +19,112 @@ package org.elasticsearch.index.query; +import org.apache.lucene.queries.ExtendedCommonTermsQuery; +import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.FuzzyQuery; +import org.apache.lucene.search.Query; +import 
org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.query.support.QueryParsers; +import org.elasticsearch.index.search.MatchQuery; import java.io.IOException; import java.util.Locale; +import java.util.Objects; /** * Match query is a query that analyzes the text and constructs a query as the result of the analysis. It * can construct different queries based on the type provided. */ -public class MatchQueryBuilder extends QueryBuilder implements BoostableQueryBuilder { +public class MatchQueryBuilder extends AbstractQueryBuilder { - public enum Operator { - OR, - AND - } + /** The default name for the match query */ + public static final String NAME = "match"; - public enum Type { - /** - * The text is analyzed and terms are added to a boolean query. - */ - BOOLEAN, - /** - * The text is analyzed and used as a phrase query. - */ - PHRASE, - /** - * The text is analyzed and used in a phrase query, with the last term acting as a prefix. 
- */ - PHRASE_PREFIX - } + /** The default mode terms are combined in a match query */ + public static final Operator DEFAULT_OPERATOR = Operator.OR; - public enum ZeroTermsQuery { - NONE, - ALL - } + /** The default mode match query type */ + public static final MatchQuery.Type DEFAULT_TYPE = MatchQuery.Type.BOOLEAN; - private final String name; + private final String fieldName; - private final Object text; + private final Object value; - private Type type; + private MatchQuery.Type type = DEFAULT_TYPE; - private Operator operator; + private Operator operator = DEFAULT_OPERATOR; private String analyzer; - private Float boost; + private int slop = MatchQuery.DEFAULT_PHRASE_SLOP; - private Integer slop; + private Fuzziness fuzziness = null; - private Fuzziness fuzziness; + private int prefixLength = FuzzyQuery.defaultPrefixLength; - private Integer prefixLength; + private int maxExpansions = FuzzyQuery.defaultMaxExpansions; - private Integer maxExpansions; + private boolean fuzzyTranspositions = FuzzyQuery.defaultTranspositions; private String minimumShouldMatch; private String fuzzyRewrite = null; - private Boolean lenient; + private boolean lenient = MatchQuery.DEFAULT_LENIENCY; - private Boolean fuzzyTranspositions = null; + private MatchQuery.ZeroTermsQuery zeroTermsQuery = MatchQuery.DEFAULT_ZERO_TERMS_QUERY; - private ZeroTermsQuery zeroTermsQuery; + private Float cutoffFrequency = null; - private Float cutoff_Frequency = null; - - private String queryName; + static final MatchQueryBuilder PROTOTYPE = new MatchQueryBuilder("",""); /** - * Constructs a new text query. + * Constructs a new match query. 
*/ - public MatchQueryBuilder(String name, Object text) { - this.name = name; - this.text = text; + public MatchQueryBuilder(String fieldName, Object value) { + if (fieldName == null) { + throw new IllegalArgumentException("[" + NAME + "] requires fieldName"); + } + if (value == null) { + throw new IllegalArgumentException("[" + NAME + "] requires query value"); + } + this.fieldName = fieldName; + this.value = value; } - /** - * Sets the type of the text query. - */ - public MatchQueryBuilder type(Type type) { + /** Returns the field name used in this query. */ + public String fieldName() { + return this.fieldName; + } + + /** Returns the value used in this query. */ + public Object value() { + return this.value; + } + + /** Sets the type of the text query. */ + public MatchQueryBuilder type(MatchQuery.Type type) { + if (type == null) { + throw new IllegalArgumentException("[" + NAME + "] requires type to be non-null"); + } this.type = type; return this; } - /** - * Sets the operator to use when using a boolean query. Defaults to OR. - */ + /** Get the type of the query. */ + public MatchQuery.Type type() { + return this.type; + } + + /** Sets the operator to use when using a boolean query. Defaults to OR. */ public MatchQueryBuilder operator(Operator operator) { + if (operator == null) { + throw new IllegalArgumentException("[" + NAME + "] requires operator to be non-null"); + } this.operator = operator; return this; } @@ -123,147 +138,326 @@ public class MatchQueryBuilder extends QueryBuilder implements BoostableQueryBui return this; } - /** - * Set the boost to apply to the query. - */ - @Override - public MatchQueryBuilder boost(float boost) { - this.boost = boost; - return this; + /** Get the analyzer to use, if previously set, otherwise null */ + public String analyzer() { + return this.analyzer; } - /** - * Set the phrase slop if evaluated to a phrase query type. 
- */ + /** Sets a slop factor for phrase queries */ public MatchQueryBuilder slop(int slop) { + if (slop < 0 ) { + throw new IllegalArgumentException("No negative slop allowed."); + } this.slop = slop; return this; } - /** - * Sets the fuzziness used when evaluated to a fuzzy query type. Defaults to "AUTO". - */ + /** Get the slop factor for phrase queries. */ + public int slop() { + return this.slop; + } + + /** Sets the fuzziness used when evaluated to a fuzzy query type. Defaults to "AUTO". */ public MatchQueryBuilder fuzziness(Object fuzziness) { this.fuzziness = Fuzziness.build(fuzziness); return this; } + /** Gets the fuzziness used when evaluated to a fuzzy query type. */ + public Fuzziness fuzziness() { + return this.fuzziness; + } + + /** + * Sets the length of a length of common (non-fuzzy) prefix for fuzzy match queries + * @param prefixLength non-negative length of prefix + * @throws IllegalArgumentException in case the prefix is negative + */ public MatchQueryBuilder prefixLength(int prefixLength) { + if (prefixLength < 0 ) { + throw new IllegalArgumentException("No negative prefix length allowed."); + } this.prefixLength = prefixLength; return this; } /** - * When using fuzzy or prefix type query, the number of term expansions to use. Defaults to unbounded - * so its recommended to set it to a reasonable value for faster execution. + * Gets the length of a length of common (non-fuzzy) prefix for fuzzy match queries + */ + public int prefixLength() { + return this.prefixLength; + } + + /** + * When using fuzzy or prefix type query, the number of term expansions to use. */ public MatchQueryBuilder maxExpansions(int maxExpansions) { + if (maxExpansions < 0 ) { + throw new IllegalArgumentException("No negative maxExpansions allowed."); + } this.maxExpansions = maxExpansions; return this; } + /** + * Get the (optional) number of term expansions when using fuzzy or prefix type query. 
+ */ + public int maxExpansions() { + return this.maxExpansions; + } + /** * Set a cutoff value in [0..1] (or absolute number >=1) representing the * maximum threshold of a terms document frequency to be considered a low * frequency term. */ public MatchQueryBuilder cutoffFrequency(float cutoff) { - this.cutoff_Frequency = cutoff; + this.cutoffFrequency = cutoff; return this; } + /** Gets the optional cutoff value, can be null if not set previously */ + public Float cutoffFrequency() { + return this.cutoffFrequency; + } + + /** Sets optional minimumShouldMatch value to apply to the query */ public MatchQueryBuilder minimumShouldMatch(String minimumShouldMatch) { this.minimumShouldMatch = minimumShouldMatch; return this; } + /** Gets the minimumShouldMatch value */ + public String minimumShouldMatch() { + return this.minimumShouldMatch; + } + + /** Sets the fuzzy_rewrite parameter controlling how the fuzzy query will get rewritten */ public MatchQueryBuilder fuzzyRewrite(String fuzzyRewrite) { this.fuzzyRewrite = fuzzyRewrite; return this; } + /** + * Get the fuzzy_rewrite parameter + * @see #fuzzyRewrite(String) + */ + public String fuzzyRewrite() { + return this.fuzzyRewrite; + } + + /** + * Sets whether transpositions are supported in fuzzy queries.

+ * The default metric used by fuzzy queries to determine a match is the Damerau-Levenshtein + * distance formula which supports transpositions. Setting transposition to false will + * switch to classic Levenshtein distance.
+ * If not set, Damerau-Levenshtein distance metric will be used. + */ public MatchQueryBuilder fuzzyTranspositions(boolean fuzzyTranspositions) { - //LUCENE 4 UPGRADE add documentation this.fuzzyTranspositions = fuzzyTranspositions; return this; } + /** Gets the fuzzy query transposition setting. */ + public boolean fuzzyTranspositions() { + return this.fuzzyTranspositions; + } + + /** + * Sets whether format based failures will be ignored. + * @deprecated use #lenient() instead + */ + @Deprecated + public MatchQueryBuilder setLenient(boolean lenient) { + return lenient(lenient); + } + /** * Sets whether format based failures will be ignored. */ - public MatchQueryBuilder setLenient(boolean lenient) { + public MatchQueryBuilder lenient(boolean lenient) { this.lenient = lenient; return this; } - public MatchQueryBuilder zeroTermsQuery(ZeroTermsQuery zeroTermsQuery) { + /** + * Gets leniency setting that controls if format based failures will be ignored. + */ + public boolean lenient() { + return this.lenient; + } + + /** + * Sets query to use in case no query terms are available, e.g. after analysis removed them. + * Defaults to {@link MatchQuery.ZeroTermsQuery#NONE}, but can be set to + * {@link MatchQuery.ZeroTermsQuery#ALL} instead. + */ + public MatchQueryBuilder zeroTermsQuery(MatchQuery.ZeroTermsQuery zeroTermsQuery) { + if (zeroTermsQuery == null) { + throw new IllegalArgumentException("[" + NAME + "] requires zeroTermsQuery to be non-null"); + } this.zeroTermsQuery = zeroTermsQuery; return this; } /** - * Sets the query name for the filter that can be used when searching for matched_filters per hit. + * Get the setting for handling zero terms queries. 
+ * @see #zeroTermsQuery(ZeroTermsQuery) */ - public MatchQueryBuilder queryName(String queryName) { - this.queryName = queryName; - return this; + public MatchQuery.ZeroTermsQuery zeroTermsQuery() { + return this.zeroTermsQuery; } @Override public void doXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(MatchQueryParser.NAME); - builder.startObject(name); + builder.startObject(NAME); + builder.startObject(fieldName); - builder.field("query", text); - if (type != null) { - builder.field("type", type.toString().toLowerCase(Locale.ENGLISH)); - } - if (operator != null) { - builder.field("operator", operator.toString()); - } + builder.field("query", value); + builder.field("type", type.toString().toLowerCase(Locale.ENGLISH)); + builder.field("operator", operator.toString()); if (analyzer != null) { builder.field("analyzer", analyzer); } - if (boost != null) { - builder.field("boost", boost); - } - if (slop != null) { - builder.field("slop", slop); - } + builder.field("slop", slop); if (fuzziness != null) { fuzziness.toXContent(builder, params); } - if (prefixLength != null) { - builder.field("prefix_length", prefixLength); - } - if (maxExpansions != null) { - builder.field("max_expansions", maxExpansions); - } + builder.field("prefix_length", prefixLength); + builder.field("max_expansions", maxExpansions); if (minimumShouldMatch != null) { builder.field("minimum_should_match", minimumShouldMatch); } if (fuzzyRewrite != null) { builder.field("fuzzy_rewrite", fuzzyRewrite); } - if (fuzzyTranspositions != null) { - //LUCENE 4 UPGRADE we need to document this & test this - builder.field("fuzzy_transpositions", fuzzyTranspositions); + // LUCENE 4 UPGRADE we need to document this & test this + builder.field("fuzzy_transpositions", fuzzyTranspositions); + builder.field("lenient", lenient); + builder.field("zero_terms_query", zeroTermsQuery.toString()); + if (cutoffFrequency != null) { + builder.field("cutoff_frequency", 
cutoffFrequency); } - if (lenient != null) { - builder.field("lenient", lenient); - } - if (zeroTermsQuery != null) { - builder.field("zero_terms_query", zeroTermsQuery.toString()); - } - if (cutoff_Frequency != null) { - builder.field("cutoff_frequency", cutoff_Frequency); - } - if (queryName != null) { - builder.field("_name", queryName); - } - - + printBoostAndQueryName(builder); builder.endObject(); builder.endObject(); } + + @Override + protected Query doToQuery(QueryShardContext context) throws IOException { + // validate context specific fields + if (analyzer != null && context.analysisService().analyzer(analyzer) == null) { + throw new QueryShardException(context, "[match] analyzer [" + analyzer + "] not found"); + } + + MatchQuery matchQuery = new MatchQuery(context); + matchQuery.setOccur(operator.toBooleanClauseOccur()); + matchQuery.setAnalyzer(analyzer); + matchQuery.setPhraseSlop(slop); + matchQuery.setFuzziness(fuzziness); + matchQuery.setFuzzyPrefixLength(prefixLength); + matchQuery.setMaxExpansions(maxExpansions); + matchQuery.setTranspositions(fuzzyTranspositions); + matchQuery.setFuzzyRewriteMethod(QueryParsers.parseRewriteMethod(context.parseFieldMatcher(), fuzzyRewrite, null)); + matchQuery.setLenient(lenient); + matchQuery.setCommonTermsCutoff(cutoffFrequency); + matchQuery.setZeroTermsQuery(zeroTermsQuery); + + Query query = matchQuery.parse(type, fieldName, value); + if (query == null) { + return null; + } + + if (query instanceof BooleanQuery) { + query = Queries.applyMinimumShouldMatch((BooleanQuery) query, minimumShouldMatch); + } else if (query instanceof ExtendedCommonTermsQuery) { + ((ExtendedCommonTermsQuery)query).setLowFreqMinimumNumberShouldMatch(minimumShouldMatch); + } + return query; + } + + @Override + protected boolean doEquals(MatchQueryBuilder other) { + return Objects.equals(fieldName, other.fieldName) && + Objects.equals(value, other.value) && + Objects.equals(type, other.type) && + Objects.equals(operator, other.operator) 
&& + Objects.equals(analyzer, other.analyzer) && + Objects.equals(slop, other.slop) && + Objects.equals(fuzziness, other.fuzziness) && + Objects.equals(prefixLength, other.prefixLength) && + Objects.equals(maxExpansions, other.maxExpansions) && + Objects.equals(minimumShouldMatch, other.minimumShouldMatch) && + Objects.equals(fuzzyRewrite, other.fuzzyRewrite) && + Objects.equals(lenient, other.lenient) && + Objects.equals(fuzzyTranspositions, other.fuzzyTranspositions) && + Objects.equals(zeroTermsQuery, other.zeroTermsQuery) && + Objects.equals(cutoffFrequency, other.cutoffFrequency); + } + + @Override + protected int doHashCode() { + return Objects.hash(fieldName, value, type, operator, analyzer, slop, + fuzziness, prefixLength, maxExpansions, minimumShouldMatch, + fuzzyRewrite, lenient, fuzzyTranspositions, zeroTermsQuery, cutoffFrequency); + } + + @Override + protected MatchQueryBuilder doReadFrom(StreamInput in) throws IOException { + MatchQueryBuilder matchQuery = new MatchQueryBuilder(in.readString(), in.readGenericValue()); + matchQuery.type = MatchQuery.Type.readTypeFrom(in); + matchQuery.operator = Operator.readOperatorFrom(in); + matchQuery.slop = in.readVInt(); + matchQuery.prefixLength = in.readVInt(); + matchQuery.maxExpansions = in.readVInt(); + matchQuery.fuzzyTranspositions = in.readBoolean(); + matchQuery.lenient = in.readBoolean(); + matchQuery.zeroTermsQuery = MatchQuery.ZeroTermsQuery.readZeroTermsQueryFrom(in); + // optional fields + matchQuery.analyzer = in.readOptionalString(); + matchQuery.minimumShouldMatch = in.readOptionalString(); + matchQuery.fuzzyRewrite = in.readOptionalString(); + if (in.readBoolean()) { + matchQuery.fuzziness = Fuzziness.readFuzzinessFrom(in); + } + if (in.readBoolean()) { + matchQuery.cutoffFrequency = in.readFloat(); + } + return matchQuery; + } + + @Override + protected void doWriteTo(StreamOutput out) throws IOException { + out.writeString(fieldName); + out.writeGenericValue(value); + type.writeTo(out); + 
operator.writeTo(out); + out.writeVInt(slop); + out.writeVInt(prefixLength); + out.writeVInt(maxExpansions); + out.writeBoolean(fuzzyTranspositions); + out.writeBoolean(lenient); + zeroTermsQuery.writeTo(out); + // optional fields + out.writeOptionalString(analyzer); + out.writeOptionalString(minimumShouldMatch); + out.writeOptionalString(fuzzyRewrite); + if (fuzziness == null) { + out.writeBoolean(false); + } else { + out.writeBoolean(true); + fuzziness.writeTo(out); + } + if (cutoffFrequency == null) { + out.writeBoolean(false); + } else { + out.writeBoolean(true); + out.writeFloat(cutoffFrequency); + } + } + + @Override + public String getWriteableName() { + return NAME; + } } diff --git a/core/src/main/java/org/elasticsearch/index/query/MatchQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/MatchQueryParser.java index 5e8a516d9d8..afcf25ca2a7 100644 --- a/core/src/main/java/org/elasticsearch/index/query/MatchQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/MatchQueryParser.java @@ -19,40 +19,29 @@ package org.elasticsearch.index.query; -import org.apache.lucene.queries.ExtendedCommonTermsQuery; -import org.apache.lucene.search.BooleanClause; -import org.apache.lucene.search.BooleanQuery; -import org.apache.lucene.search.Query; +import org.apache.lucene.search.FuzzyQuery; import org.elasticsearch.common.ParsingException; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.query.support.QueryParsers; import org.elasticsearch.index.search.MatchQuery; +import org.elasticsearch.index.search.MatchQuery.ZeroTermsQuery; import java.io.IOException; /** * */ -public class MatchQueryParser implements QueryParser { - - public static final String NAME = "match"; - - @Inject - public MatchQueryParser() { - } +public class MatchQueryParser implements 
QueryParser { @Override public String[] names() { return new String[]{ - NAME, "match_phrase", "matchPhrase", "match_phrase_prefix", "matchPhrasePrefix", "matchFuzzy", "match_fuzzy", "fuzzy_match" + MatchQueryBuilder.NAME, "match_phrase", "matchPhrase", "match_phrase_prefix", "matchPhrasePrefix", "matchFuzzy", "match_fuzzy", "fuzzy_match" }; } @Override - public Query parse(QueryParseContext parseContext) throws IOException, ParsingException { + public MatchQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException { XContentParser parser = parseContext.parser(); MatchQuery.Type type = MatchQuery.Type.BOOLEAN; @@ -66,14 +55,24 @@ public class MatchQueryParser implements QueryParser { XContentParser.Token token = parser.nextToken(); if (token != XContentParser.Token.FIELD_NAME) { - throw new ParsingException(parseContext, "[match] query malformed, no field"); + throw new ParsingException(parser.getTokenLocation(), "[match] query malformed, no field"); } String fieldName = parser.currentName(); Object value = null; - float boost = 1.0f; - MatchQuery matchQuery = new MatchQuery(parseContext); + float boost = AbstractQueryBuilder.DEFAULT_BOOST; String minimumShouldMatch = null; + String analyzer = null; + Operator operator = MatchQueryBuilder.DEFAULT_OPERATOR; + int slop = MatchQuery.DEFAULT_PHRASE_SLOP; + Fuzziness fuzziness = null; + int prefixLength = FuzzyQuery.defaultPrefixLength; + int maxExpansion = FuzzyQuery.defaultMaxExpansions; + boolean fuzzyTranspositions = FuzzyQuery.defaultTranspositions; + String fuzzyRewrite = null; + boolean lenient = MatchQuery.DEFAULT_LENIENCY; + Float cutOffFrequency = null; + ZeroTermsQuery zeroTermsQuery = MatchQuery.DEFAULT_ZERO_TERMS_QUERY; String queryName = null; token = parser.nextToken(); @@ -94,57 +93,45 @@ public class MatchQueryParser implements QueryParser { } else if ("phrase_prefix".equals(tStr) || "phrasePrefix".equals(currentFieldName)) { type = MatchQuery.Type.PHRASE_PREFIX; } else { - throw new 
ParsingException(parseContext, "[match] query does not support type " + tStr); + throw new ParsingException(parser.getTokenLocation(), "[match] query does not support type " + tStr); } } else if ("analyzer".equals(currentFieldName)) { - String analyzer = parser.text(); - if (parseContext.analysisService().analyzer(analyzer) == null) { - throw new ParsingException(parseContext, "[match] analyzer [" + parser.text() + "] not found"); - } - matchQuery.setAnalyzer(analyzer); + analyzer = parser.text(); } else if ("boost".equals(currentFieldName)) { boost = parser.floatValue(); } else if ("slop".equals(currentFieldName) || "phrase_slop".equals(currentFieldName) || "phraseSlop".equals(currentFieldName)) { - matchQuery.setPhraseSlop(parser.intValue()); + slop = parser.intValue(); } else if (parseContext.parseFieldMatcher().match(currentFieldName, Fuzziness.FIELD)) { - matchQuery.setFuzziness(Fuzziness.parse(parser)); + fuzziness = Fuzziness.parse(parser); } else if ("prefix_length".equals(currentFieldName) || "prefixLength".equals(currentFieldName)) { - matchQuery.setFuzzyPrefixLength(parser.intValue()); + prefixLength = parser.intValue(); } else if ("max_expansions".equals(currentFieldName) || "maxExpansions".equals(currentFieldName)) { - matchQuery.setMaxExpansions(parser.intValue()); + maxExpansion = parser.intValue(); } else if ("operator".equals(currentFieldName)) { - String op = parser.text(); - if ("or".equalsIgnoreCase(op)) { - matchQuery.setOccur(BooleanClause.Occur.SHOULD); - } else if ("and".equalsIgnoreCase(op)) { - matchQuery.setOccur(BooleanClause.Occur.MUST); - } else { - throw new ParsingException(parseContext, "text query requires operator to be either 'and' or 'or', not [" - + op + "]"); - } + operator = Operator.fromString(parser.text()); } else if ("minimum_should_match".equals(currentFieldName) || "minimumShouldMatch".equals(currentFieldName)) { minimumShouldMatch = parser.textOrNull(); } else if ("fuzzy_rewrite".equals(currentFieldName) || 
"fuzzyRewrite".equals(currentFieldName)) { - matchQuery.setFuzzyRewriteMethod(QueryParsers.parseRewriteMethod(parseContext.parseFieldMatcher(), parser.textOrNull(), null)); + fuzzyRewrite = parser.textOrNull(); } else if ("fuzzy_transpositions".equals(currentFieldName)) { - matchQuery.setTranspositions(parser.booleanValue()); + fuzzyTranspositions = parser.booleanValue(); } else if ("lenient".equals(currentFieldName)) { - matchQuery.setLenient(parser.booleanValue()); + lenient = parser.booleanValue(); } else if ("cutoff_frequency".equals(currentFieldName)) { - matchQuery.setCommonTermsCutoff(parser.floatValue()); + cutOffFrequency = parser.floatValue(); } else if ("zero_terms_query".equals(currentFieldName)) { String zeroTermsDocs = parser.text(); if ("none".equalsIgnoreCase(zeroTermsDocs)) { - matchQuery.setZeroTermsQuery(MatchQuery.ZeroTermsQuery.NONE); + zeroTermsQuery = MatchQuery.ZeroTermsQuery.NONE; } else if ("all".equalsIgnoreCase(zeroTermsDocs)) { - matchQuery.setZeroTermsQuery(MatchQuery.ZeroTermsQuery.ALL); + zeroTermsQuery = MatchQuery.ZeroTermsQuery.ALL; } else { - throw new ParsingException(parseContext, "Unsupported zero_terms_docs value [" + zeroTermsDocs + "]"); + throw new ParsingException(parser.getTokenLocation(), "Unsupported zero_terms_docs value [" + zeroTermsDocs + "]"); } } else if ("_name".equals(currentFieldName)) { queryName = parser.text(); } else { - throw new ParsingException(parseContext, "[match] query does not support [" + currentFieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), "[match] query does not support [" + currentFieldName + "]"); } } } @@ -154,29 +141,40 @@ public class MatchQueryParser implements QueryParser { // move to the next token token = parser.nextToken(); if (token != XContentParser.Token.END_OBJECT) { - throw new ParsingException(parseContext, + throw new ParsingException(parser.getTokenLocation(), "[match] query parsed in simplified form, with direct field name, but included more options 
than just the field name, possibly use its 'options' form, with 'query' element?"); } } if (value == null) { - throw new ParsingException(parseContext, "No text specified for text query"); + throw new ParsingException(parser.getTokenLocation(), "No text specified for text query"); } - Query query = matchQuery.parse(type, fieldName, value); - if (query == null) { - return null; + MatchQueryBuilder matchQuery = new MatchQueryBuilder(fieldName, value); + matchQuery.operator(operator); + matchQuery.type(type); + matchQuery.analyzer(analyzer); + matchQuery.slop(slop); + matchQuery.minimumShouldMatch(minimumShouldMatch); + if (fuzziness != null) { + matchQuery.fuzziness(fuzziness); } + matchQuery.fuzzyRewrite(fuzzyRewrite); + matchQuery.prefixLength(prefixLength); + matchQuery.fuzzyTranspositions(fuzzyTranspositions); + matchQuery.maxExpansions(maxExpansion); + matchQuery.lenient(lenient); + if (cutOffFrequency != null) { + matchQuery.cutoffFrequency(cutOffFrequency); + } + matchQuery.zeroTermsQuery(zeroTermsQuery); + matchQuery.queryName(queryName); + matchQuery.boost(boost); + return matchQuery; + } - if (query instanceof BooleanQuery) { - query = Queries.applyMinimumShouldMatch((BooleanQuery) query, minimumShouldMatch); - } else if (query instanceof ExtendedCommonTermsQuery) { - ((ExtendedCommonTermsQuery)query).setLowFreqMinimumNumberShouldMatch(minimumShouldMatch); - } - query.setBoost(boost); - if (queryName != null) { - parseContext.addNamedQuery(queryName, query); - } - return query; + @Override + public MatchQueryBuilder getBuilderPrototype() { + return MatchQueryBuilder.PROTOTYPE; } } diff --git a/core/src/main/java/org/elasticsearch/index/query/MissingQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/MissingQueryBuilder.java index ac3f279d4d1..a3374bf1c7f 100644 --- a/core/src/main/java/org/elasticsearch/index/query/MissingQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/MissingQueryBuilder.java @@ -19,66 +19,216 @@ 
package org.elasticsearch.index.query; +import org.apache.lucene.search.BooleanClause; +import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.ConstantScoreQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.TermRangeQuery; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.internal.FieldNamesFieldMapper; +import org.elasticsearch.index.mapper.object.ObjectMapper; import java.io.IOException; +import java.util.Collection; +import java.util.Objects; /** - * Constructs a filter that only match on documents that the field has a value in them. + * Constructs a filter that have only null values or no value in the original field. */ -public class MissingQueryBuilder extends QueryBuilder { +public class MissingQueryBuilder extends AbstractQueryBuilder { - private String name; + public static final String NAME = "missing"; - private String queryName; + public static final boolean DEFAULT_NULL_VALUE = false; - private Boolean nullValue; + public static final boolean DEFAULT_EXISTENCE_VALUE = true; - private Boolean existence; + private final String fieldPattern; - public MissingQueryBuilder(String name) { - this.name = name; - } + private final boolean nullValue; + + private final boolean existence; + + static final MissingQueryBuilder PROTOTYPE = new MissingQueryBuilder("field", DEFAULT_NULL_VALUE, DEFAULT_EXISTENCE_VALUE); /** - * Should the missing filter automatically include fields with null value configured in the + * Constructs a filter that returns documents with only null values or no value in the original field. 
+ * @param fieldPattern the field to query + * @param nullValue should the missing filter automatically include fields with null value configured in the * mappings. Defaults to false. - */ - public MissingQueryBuilder nullValue(boolean nullValue) { - this.nullValue = nullValue; - return this; - } - - /** - * Should the missing filter include documents where the field doesn't exists in the docs. + * @param existence should the missing filter include documents where the field doesn't exist in the docs. * Defaults to true. + * @throws IllegalArgumentException when both existence and nullValue are set to false */ - public MissingQueryBuilder existence(boolean existence) { + public MissingQueryBuilder(String fieldPattern, boolean nullValue, boolean existence) { + if (Strings.isEmpty(fieldPattern)) { + throw new IllegalArgumentException("missing query must be provided with a [field]"); + } + if (nullValue == false && existence == false) { + throw new IllegalArgumentException("missing query must have either 'existence', or 'null_value', or both set to true"); + } + this.fieldPattern = fieldPattern; + this.nullValue = nullValue; this.existence = existence; - return this; + } + + public MissingQueryBuilder(String fieldPattern) { + this(fieldPattern, DEFAULT_NULL_VALUE, DEFAULT_EXISTENCE_VALUE); + } + + public String fieldPattern() { + return this.fieldPattern; } /** - * Sets the filter name for the filter that can be used when searching for matched_filters per hit. + * Returns true if the missing filter will include documents where the field contains a null value, otherwise + * these documents will not be included. */ - public MissingQueryBuilder queryName(String queryName) { - this.queryName = queryName; - return this; + public boolean nullValue() { + return this.nullValue; + } + + /** + * Returns true if the missing filter will include documents where the field has no values, otherwise + * these documents will not be included. 
+ */ + public boolean existence() { + return this.existence; } @Override protected void doXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(MissingQueryParser.NAME); - builder.field("field", name); - if (nullValue != null) { - builder.field("null_value", nullValue); - } - if (existence != null) { - builder.field("existence", existence); - } - if (queryName != null) { - builder.field("_name", queryName); - } + builder.startObject(NAME); + builder.field("field", fieldPattern); + builder.field("null_value", nullValue); + builder.field("existence", existence); + printBoostAndQueryName(builder); builder.endObject(); } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + protected Query doToQuery(QueryShardContext context) throws IOException { + return newFilter(context, fieldPattern, existence, nullValue); + } + + public static Query newFilter(QueryShardContext context, String fieldPattern, boolean existence, boolean nullValue) { + if (!existence && !nullValue) { + throw new QueryShardException(context, "missing must have either existence, or null_value, or both set to true"); + } + + final FieldNamesFieldMapper.FieldNamesFieldType fieldNamesFieldType = (FieldNamesFieldMapper.FieldNamesFieldType) context.mapperService().fullName(FieldNamesFieldMapper.NAME); + if (fieldNamesFieldType == null) { + // can only happen when no types exist, so no docs exist either + return Queries.newMatchNoDocsQuery(); + } + + ObjectMapper objectMapper = context.getObjectMapper(fieldPattern); + if (objectMapper != null) { + // automatic make the object mapper pattern + fieldPattern = fieldPattern + ".*"; + } + + Collection fields = context.simpleMatchToIndexNames(fieldPattern); + if (fields.isEmpty()) { + if (existence) { + // if we ask for existence of fields, and we found none, then we should match on all + return Queries.newMatchAllQuery(); + } + return null; + } + + Query existenceFilter = null; + Query nullFilter 
= null; + + if (existence) { + BooleanQuery.Builder boolFilter = new BooleanQuery.Builder(); + for (String field : fields) { + MappedFieldType fieldType = context.fieldMapper(field); + Query filter = null; + if (fieldNamesFieldType.isEnabled()) { + final String f; + if (fieldType != null) { + f = fieldType.names().indexName(); + } else { + f = field; + } + filter = fieldNamesFieldType.termQuery(f, context); + } + // if _field_names are not indexed, we need to go the slow way + if (filter == null && fieldType != null) { + filter = fieldType.rangeQuery(null, null, true, true); + } + if (filter == null) { + filter = new TermRangeQuery(field, null, null, true, true); + } + boolFilter.add(filter, BooleanClause.Occur.SHOULD); + } + + existenceFilter = boolFilter.build(); + existenceFilter = Queries.not(existenceFilter);; + } + + if (nullValue) { + for (String field : fields) { + MappedFieldType fieldType = context.fieldMapper(field); + if (fieldType != null) { + nullFilter = fieldType.nullValueQuery(); + } + } + } + + Query filter; + if (nullFilter != null) { + if (existenceFilter != null) { + filter = new BooleanQuery.Builder() + .add(existenceFilter, BooleanClause.Occur.SHOULD) + .add(nullFilter, BooleanClause.Occur.SHOULD) + .build(); + } else { + filter = nullFilter; + } + } else { + filter = existenceFilter; + } + + if (filter == null) { + return null; + } + + return new ConstantScoreQuery(filter); + } + + @Override + protected MissingQueryBuilder doReadFrom(StreamInput in) throws IOException { + return new MissingQueryBuilder(in.readString(), in.readBoolean(), in.readBoolean()); + } + + @Override + protected void doWriteTo(StreamOutput out) throws IOException { + out.writeString(fieldPattern); + out.writeBoolean(nullValue); + out.writeBoolean(existence); + } + + @Override + protected int doHashCode() { + return Objects.hash(fieldPattern, nullValue, existence); + } + + @Override + protected boolean doEquals(MissingQueryBuilder other) { + return 
Objects.equals(fieldPattern, other.fieldPattern) && + Objects.equals(nullValue, other.nullValue) && + Objects.equals(existence, other.existence); + } } diff --git a/core/src/main/java/org/elasticsearch/index/query/MissingQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/MissingQueryParser.java index df849ffbb4a..8d8c5aec01f 100644 --- a/core/src/main/java/org/elasticsearch/index/query/MissingQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/MissingQueryParser.java @@ -19,48 +19,30 @@ package org.elasticsearch.index.query; -import org.apache.lucene.search.BooleanClause; -import org.apache.lucene.search.BooleanQuery; -import org.apache.lucene.search.ConstantScoreQuery; -import org.apache.lucene.search.Query; -import org.apache.lucene.search.TermRangeQuery; import org.elasticsearch.common.ParsingException; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.mapper.internal.FieldNamesFieldMapper; -import org.elasticsearch.index.mapper.object.ObjectMapper; import java.io.IOException; -import java.util.Collection; /** - * + * Parser for missing query */ -public class MissingQueryParser implements QueryParser { - - public static final String NAME = "missing"; - public static final boolean DEFAULT_NULL_VALUE = false; - public static final boolean DEFAULT_EXISTENCE_VALUE = true; - - @Inject - public MissingQueryParser() { - } +public class MissingQueryParser implements QueryParser { @Override public String[] names() { - return new String[]{NAME}; + return new String[]{MissingQueryBuilder.NAME}; } @Override - public Query parse(QueryParseContext parseContext) throws IOException, ParsingException { + public MissingQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException { XContentParser parser = parseContext.parser(); 
String fieldPattern = null; String queryName = null; - boolean nullValue = DEFAULT_NULL_VALUE; - boolean existence = DEFAULT_EXISTENCE_VALUE; + float boost = AbstractQueryBuilder.DEFAULT_BOOST; + boolean nullValue = MissingQueryBuilder.DEFAULT_NULL_VALUE; + boolean existence = MissingQueryBuilder.DEFAULT_EXISTENCE_VALUE; XContentParser.Token token; String currentFieldName = null; @@ -76,106 +58,24 @@ public class MissingQueryParser implements QueryParser { existence = parser.booleanValue(); } else if ("_name".equals(currentFieldName)) { queryName = parser.text(); + } else if ("boost".equals(currentFieldName)) { + boost = parser.floatValue(); } else { - throw new ParsingException(parseContext, "[missing] query does not support [" + currentFieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), "[missing] query does not support [" + currentFieldName + "]"); } } } if (fieldPattern == null) { - throw new ParsingException(parseContext, "missing must be provided with a [field]"); + throw new ParsingException(parser.getTokenLocation(), "missing must be provided with a [field]"); } - - return newFilter(parseContext, fieldPattern, existence, nullValue, queryName); + return new MissingQueryBuilder(fieldPattern, nullValue, existence) + .boost(boost) + .queryName(queryName); } - public static Query newFilter(QueryParseContext parseContext, String fieldPattern, boolean existence, boolean nullValue, String queryName) { - if (!existence && !nullValue) { - throw new ParsingException(parseContext, "missing must have either existence, or null_value, or both set to true"); - } - - final FieldNamesFieldMapper.FieldNamesFieldType fieldNamesFieldType = (FieldNamesFieldMapper.FieldNamesFieldType)parseContext.mapperService().fullName(FieldNamesFieldMapper.NAME); - if (fieldNamesFieldType == null) { - // can only happen when no types exist, so no docs exist either - return Queries.newMatchNoDocsQuery(); - } - - ObjectMapper objectMapper = 
parseContext.getObjectMapper(fieldPattern); - if (objectMapper != null) { - // automatic make the object mapper pattern - fieldPattern = fieldPattern + ".*"; - } - - Collection fields = parseContext.simpleMatchToIndexNames(fieldPattern); - if (fields.isEmpty()) { - if (existence) { - // if we ask for existence of fields, and we found none, then we should match on all - return Queries.newMatchAllQuery(); - } - return null; - } - - Query existenceFilter = null; - Query nullFilter = null; - - if (existence) { - BooleanQuery.Builder boolFilter = new BooleanQuery.Builder(); - for (String field : fields) { - MappedFieldType fieldType = parseContext.fieldMapper(field); - Query filter = null; - if (fieldNamesFieldType.isEnabled()) { - final String f; - if (fieldType != null) { - f = fieldType.names().indexName(); - } else { - f = field; - } - filter = fieldNamesFieldType.termQuery(f, parseContext); - } - // if _field_names are not indexed, we need to go the slow way - if (filter == null && fieldType != null) { - filter = fieldType.rangeQuery(null, null, true, true); - } - if (filter == null) { - filter = new TermRangeQuery(field, null, null, true, true); - } - boolFilter.add(filter, BooleanClause.Occur.SHOULD); - } - - existenceFilter = boolFilter.build(); - existenceFilter = Queries.not(existenceFilter);; - } - - if (nullValue) { - for (String field : fields) { - MappedFieldType fieldType = parseContext.fieldMapper(field); - if (fieldType != null) { - nullFilter = fieldType.nullValueQuery(); - } - } - } - - Query filter; - if (nullFilter != null) { - if (existenceFilter != null) { - filter = new BooleanQuery.Builder() - .add(existenceFilter, BooleanClause.Occur.SHOULD) - .add(nullFilter, BooleanClause.Occur.SHOULD) - .build(); - } else { - filter = nullFilter; - } - } else { - filter = existenceFilter; - } - - if (filter == null) { - return null; - } - - if (queryName != null) { - parseContext.addNamedQuery(queryName, existenceFilter); - } - return new 
ConstantScoreQuery(filter); + @Override + public MissingQueryBuilder getBuilderPrototype() { + return MissingQueryBuilder.PROTOTYPE; } } diff --git a/core/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilder.java index 5c7e24b53c4..f777654195a 100644 --- a/core/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilder.java @@ -19,33 +19,92 @@ package org.elasticsearch.index.query; +import org.apache.lucene.analysis.Analyzer; +import org.apache.lucene.index.Fields; +import org.apache.lucene.queries.TermsQuery; +import org.apache.lucene.search.BooleanClause; +import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.util.BytesRef; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.ExceptionsHelper; -import org.elasticsearch.action.termvectors.TermVectorsRequest; +import org.elasticsearch.action.termvectors.*; +import org.elasticsearch.client.Client; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParseFieldMatcher; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.lucene.search.MoreLikeThisQuery; +import org.elasticsearch.common.lucene.search.XMoreLikeThis; import org.elasticsearch.common.lucene.uid.Versions; import org.elasticsearch.common.xcontent.*; import org.elasticsearch.index.VersionType; +import org.elasticsearch.index.analysis.Analysis; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.internal.UidFieldMapper; +import 
org.elasticsearch.search.internal.SearchContext; import java.io.IOException; import java.util.*; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.elasticsearch.index.mapper.Uid.createUidAsBytes; /** * A more like this query that finds documents that are "like" the provided set of document(s). * * The documents are provided as a set of strings and/or a list of {@link Item}. */ -public class MoreLikeThisQueryBuilder extends QueryBuilder implements BoostableQueryBuilder { +public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder { + + public static final String NAME = "mlt"; + + public static final int DEFAULT_MAX_QUERY_TERMS = XMoreLikeThis.DEFAULT_MAX_QUERY_TERMS; + public static final int DEFAULT_MIN_TERM_FREQ = XMoreLikeThis.DEFAULT_MIN_TERM_FREQ; + public static final int DEFAULT_MIN_DOC_FREQ = XMoreLikeThis.DEFAULT_MIN_DOC_FREQ; + public static final int DEFAULT_MAX_DOC_FREQ = XMoreLikeThis.DEFAULT_MAX_DOC_FREQ; + public static final int DEFAULT_MIN_WORD_LENGTH = XMoreLikeThis.DEFAULT_MIN_WORD_LENGTH; + public static final int DEFAULT_MAX_WORD_LENGTH = XMoreLikeThis.DEFAULT_MAX_WORD_LENGTH; + public static final String DEFAULT_MINIMUM_SHOULD_MATCH = MoreLikeThisQuery.DEFAULT_MINIMUM_SHOULD_MATCH; + public static final float DEFAULT_BOOST_TERMS = 0; // no boost terms + public static final boolean DEFAULT_INCLUDE = false; + public static final boolean DEFAULT_FAIL_ON_UNSUPPORTED_FIELDS = true; + + // document inputs + private final String[] fields; + private final String[] likeTexts; + private String[] unlikeTexts = Strings.EMPTY_ARRAY; + private final Item[] likeItems; + private Item[] unlikeItems = new Item[0]; + + // term selection parameters + private int maxQueryTerms = DEFAULT_MAX_QUERY_TERMS; + private int minTermFreq = DEFAULT_MIN_TERM_FREQ; + private int minDocFreq = DEFAULT_MIN_DOC_FREQ; + private int maxDocFreq = DEFAULT_MAX_DOC_FREQ; + private int minWordLength = DEFAULT_MIN_WORD_LENGTH; + 
private int maxWordLength = DEFAULT_MAX_WORD_LENGTH; + private String[] stopWords; + private String analyzer; + + // query formation parameters + private String minimumShouldMatch = DEFAULT_MINIMUM_SHOULD_MATCH; + private float boostTerms = DEFAULT_BOOST_TERMS; + private boolean include = DEFAULT_INCLUDE; + + // other parameters + private boolean failOnUnsupportedField = DEFAULT_FAIL_ON_UNSUPPORTED_FIELDS; + + static final MoreLikeThisQueryBuilder PROTOTYPE = new MoreLikeThisQueryBuilder(new String[]{"_na_"}, null); /** * A single item to be used for a {@link MoreLikeThisQueryBuilder}. */ - public static final class Item implements ToXContent { + public static final class Item implements ToXContent, Writeable { public static final Item[] EMPTY_ARRAY = new Item[0]; public interface Field { @@ -70,6 +129,8 @@ public class MoreLikeThisQueryBuilder extends QueryBuilder implements BoostableQ private long version = Versions.MATCH_ANY; private VersionType versionType = VersionType.INTERNAL; + static final Item PROTOTYPE = new Item(); + public Item() { } @@ -81,7 +142,10 @@ public class MoreLikeThisQueryBuilder extends QueryBuilder implements BoostableQ * @param type the type of the document * @param id and its id */ - public Item(String index, @Nullable String type, String id) { + public Item(@Nullable String index, @Nullable String type, String id) { + if (id == null) { + throw new IllegalArgumentException("Item requires id to be non-null"); + } this.index = index; this.type = type; this.id = id; @@ -94,10 +158,13 @@ public class MoreLikeThisQueryBuilder extends QueryBuilder implements BoostableQ * @param type the type to be used for parsing the doc * @param doc the document specification */ - public Item(String index, String type, XContentBuilder doc) { + public Item(@Nullable String index, @Nullable String type, XContentBuilder doc) { + if (doc == null) { + throw new IllegalArgumentException("Item requires doc to be non-null"); + } this.index = index; this.type = type; 
- this.doc(doc); + this.doc = doc.bytes(); } public String index() { @@ -122,30 +189,10 @@ public class MoreLikeThisQueryBuilder extends QueryBuilder implements BoostableQ return id; } - public Item id(String id) { - this.id = id; - return this; - } - public BytesReference doc() { return doc; } - /** - * Sets to a given artificial document, that is a document that is not present in the index. - */ - public Item doc(BytesReference doc) { - this.doc = doc; - return this; - } - - /** - * Sets to a given artificial document, that is a document that is not present in the index. - */ - public Item doc(XContentBuilder doc) { - return this.doc(doc.bytes()); - } - public String[] fields() { return fields; } @@ -213,7 +260,7 @@ public class MoreLikeThisQueryBuilder extends QueryBuilder implements BoostableQ // for artificial docs to make sure that the id has changed in the item too if (doc != null) { termVectorsRequest.doc(doc, true); - this.id(termVectorsRequest.id()); + this.id = termVectorsRequest.id(); } return termVectorsRequest; } @@ -235,7 +282,7 @@ public class MoreLikeThisQueryBuilder extends QueryBuilder implements BoostableQ } else if (parseFieldMatcher.match(currentFieldName, Field.ID)) { item.id = parser.text(); } else if (parseFieldMatcher.match(currentFieldName, Field.DOC)) { - item.doc(jsonBuilder().copyCurrentStructure(parser)); + item.doc = jsonBuilder().copyCurrentStructure(parser).bytes(); } else if (parseFieldMatcher.match(currentFieldName, Field.FIELDS)) { if (token == XContentParser.Token.START_ARRAY) { List fields = new ArrayList<>(); @@ -266,6 +313,10 @@ public class MoreLikeThisQueryBuilder extends QueryBuilder implements BoostableQ throw new ElasticsearchParseException( "failed to parse More Like This item. either [id] or [doc] can be specified, but not both!"); } + if (item.id == null && item.doc == null) { + throw new ElasticsearchParseException( + "failed to parse More Like This item. 
neither [id] nor [doc] is specified!"); + } return item; } @@ -278,7 +329,7 @@ public class MoreLikeThisQueryBuilder extends QueryBuilder implements BoostableQ if (this.type != null) { builder.field(Field.TYPE.getPreferredName(), this.type); } - if (this.id != null && this.doc == null) { + if (this.id != null) { builder.field(Field.ID.getPreferredName(), this.id); } if (this.doc != null) { @@ -322,6 +373,45 @@ public class MoreLikeThisQueryBuilder extends QueryBuilder implements BoostableQ } } + @Override + public Item readFrom(StreamInput in) throws IOException { + Item item = new Item(); + item.index = in.readOptionalString(); + item.type = in.readOptionalString(); + if (in.readBoolean()) { + item.doc = (BytesReference) in.readGenericValue(); + } else { + item.id = in.readString(); + } + item.fields = in.readOptionalStringArray(); + item.perFieldAnalyzer = (Map) in.readGenericValue(); + item.routing = in.readOptionalString(); + item.version = in.readLong(); + item.versionType = VersionType.readVersionTypeFrom(in); + return item; + } + + public static Item readItemFrom(StreamInput in) throws IOException { + return PROTOTYPE.readFrom(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeOptionalString(index); + out.writeOptionalString(type); + out.writeBoolean(doc != null); + if (doc != null) { + out.writeGenericValue(doc); + } else { + out.writeString(id); + } + out.writeOptionalStringArray(fields); + out.writeGenericValue(perFieldAnalyzer); + out.writeOptionalString(routing); + out.writeLong(version); + versionType.writeTo(out); + } + @Override public int hashCode() { return Objects.hash(index, type, id, doc, Arrays.hashCode(fields), perFieldAnalyzer, routing, @@ -345,115 +435,69 @@ public class MoreLikeThisQueryBuilder extends QueryBuilder implements BoostableQ } } - // document inputs - private List likeTexts = new ArrayList<>(); - private List unlikeTexts = new ArrayList<>(); - private List likeItems = new 
ArrayList<>(); - private List unlikeItems = new ArrayList<>(); - private final String[] fields; - - // term selection parameters - private int maxQueryTerms = -1; - private int minTermFreq = -1; - private int minDocFreq = -1; - private int maxDocFreq = -1; - private int minWordLength = -1; - private int maxWordLength = -1; - private String[] stopWords = null; - private String analyzer; - - // query formation parameters - private String minimumShouldMatch = null; - private float boostTerms = -1; - private Boolean include = null; - - // other parameters - private Boolean failOnUnsupportedField; - private float boost = -1; - private String queryName; - /** * Constructs a new more like this query which uses the "_all" field. + * @param likeTexts the text to use when generating the 'More Like This' query. + * @param likeItems the documents to use when generating the 'More Like This' query. */ - public MoreLikeThisQueryBuilder() { - this.fields = null; + public MoreLikeThisQueryBuilder(String[] likeTexts, Item[] likeItems) { + this(null, likeTexts, likeItems); } /** * Sets the field names that will be used when generating the 'More Like This' query. * * @param fields the field names that will be used when generating the 'More Like This' query. - */ - public MoreLikeThisQueryBuilder(String... fields) { - this.fields = fields; - } - - /** - * Sets the text to use in order to find documents that are "like" this. - * * @param likeTexts the text to use when generating the 'More Like This' query. - */ - public MoreLikeThisQueryBuilder like(String... likeTexts) { - this.likeTexts = new ArrayList<>(); - return addLikeText(likeTexts); - } - - /** - * Sets the documents to use in order to find documents that are "like" this. - * * @param likeItems the documents to use when generating the 'More Like This' query. */ - public MoreLikeThisQueryBuilder like(Item... 
likeItems) { - this.likeItems = new ArrayList<>(); - return addLikeItem(likeItems); + public MoreLikeThisQueryBuilder(@Nullable String[] fields, @Nullable String[] likeTexts, @Nullable Item[] likeItems) { + // TODO we allow null here for the _all field, but this is forbidden in the parser. Re-check + if (fields != null && fields.length == 0) { + throw new IllegalArgumentException("mlt query requires 'fields' to be specified"); + } + if ((likeTexts == null || likeTexts.length == 0) && (likeItems == null || likeItems.length == 0)) { + throw new IllegalArgumentException("mlt query requires either 'like' texts or items to be specified."); + } + this.fields = fields; + this.likeTexts = Optional.ofNullable(likeTexts).orElse(Strings.EMPTY_ARRAY); + this.likeItems = Optional.ofNullable(likeItems).orElse(new Item[0]); } - /** - * Adds some text to use in order to find documents that are "like" this. - */ - public MoreLikeThisQueryBuilder addLikeText(String... likeTexts) { - Collections.addAll(this.likeTexts, likeTexts); - return this; + public String[] fields() { + return this.fields; } - /** - * Adds a document to use in order to find documents that are "like" this. - */ - public MoreLikeThisQueryBuilder addLikeItem(Item... likeItems) { - Collections.addAll(this.likeItems, likeItems); - return this; + public String[] likeTexts() { + return likeTexts; + } + + public Item[] likeItems() { + return likeItems; } /** * Sets the text from which the terms should not be selected from. */ - public MoreLikeThisQueryBuilder unlike(String... unlikeTexts) { - this.unlikeTexts = new ArrayList<>(); - return addUnlikeText(unlikeTexts); + public MoreLikeThisQueryBuilder unlike(String[] unlikeTexts) { + this.unlikeTexts = Optional.ofNullable(unlikeTexts).orElse(Strings.EMPTY_ARRAY); + return this; + } + + public String[] unlikeTexts() { + return unlikeTexts; } /** * Sets the documents from which the terms should not be selected from. */ - public MoreLikeThisQueryBuilder unlike(Item... 
unlikeItems) { - this.unlikeItems = new ArrayList<>(); - return addUnlikeItem(unlikeItems); - } - - /** - * Adds some text to use in order to find documents that are "unlike" this. - */ - public MoreLikeThisQueryBuilder addUnlikeText(String... unlikeTexts) { - Collections.addAll(this.unlikeTexts, unlikeTexts); + public MoreLikeThisQueryBuilder unlike(Item[] unlikeItems) { + this.unlikeItems = Optional.ofNullable(unlikeItems).orElse(new Item[0]); return this; } - /** - * Adds a document to use in order to find documents that are "unlike" this. - */ - public MoreLikeThisQueryBuilder addUnlikeItem(Item... unlikeItems) { - Collections.addAll(this.unlikeItems, unlikeItems); - return this; + public Item[] unlikeItems() { + return unlikeItems; } /** @@ -465,6 +509,10 @@ public class MoreLikeThisQueryBuilder extends QueryBuilder implements BoostableQ return this; } + public int maxQueryTerms() { + return maxQueryTerms; + } + /** * The frequency below which terms will be ignored in the source doc. The default * frequency is 2. @@ -474,6 +522,10 @@ public class MoreLikeThisQueryBuilder extends QueryBuilder implements BoostableQ return this; } + public int minTermFreq() { + return minTermFreq; + } + /** * Sets the frequency at which words will be ignored which do not occur in at least this * many docs. Defaults to 5. @@ -483,6 +535,10 @@ public class MoreLikeThisQueryBuilder extends QueryBuilder implements BoostableQ return this; } + public int minDocFreq() { + return minDocFreq; + } + /** * Set the maximum frequency in which words may still appear. Words that appear * in more than this many docs will be ignored. Defaults to unbounded. @@ -492,6 +548,10 @@ public class MoreLikeThisQueryBuilder extends QueryBuilder implements BoostableQ return this; } + public int maxDocFreq() { + return maxDocFreq; + } + /** * Sets the minimum word length below which words will be ignored. Defaults * to 0. 
@@ -501,6 +561,10 @@ public class MoreLikeThisQueryBuilder extends QueryBuilder implements BoostableQ return this; } + public int minWordLength() { + return minWordLength; + } + /** * Sets the maximum word length above which words will be ignored. Defaults to * unbounded (0). @@ -510,6 +574,10 @@ public class MoreLikeThisQueryBuilder extends QueryBuilder implements BoostableQ return this; } + public int maxWordLength() { + return maxWordLength; + } + /** * Set the set of stopwords. *

@@ -522,6 +590,18 @@ public class MoreLikeThisQueryBuilder extends QueryBuilder implements BoostableQ return this; } + public MoreLikeThisQueryBuilder stopWords(List stopWords) { + if (stopWords == null) { + throw new IllegalArgumentException("requires stopwords to be non-null"); + } + this.stopWords = stopWords.toArray(new String[stopWords.size()]); + return this; + } + + public String[] stopWords() { + return stopWords; + } + /** * The analyzer that will be used to analyze the text. Defaults to the analyzer associated with the fied. */ @@ -530,6 +610,10 @@ public class MoreLikeThisQueryBuilder extends QueryBuilder implements BoostableQ return this; } + public String analyzer() { + return analyzer; + } + /** * Number of terms that must match the generated query expressed in the * common syntax for minimum should match. Defaults to 30%. @@ -537,18 +621,29 @@ public class MoreLikeThisQueryBuilder extends QueryBuilder implements BoostableQ * @see org.elasticsearch.common.lucene.search.Queries#calculateMinShouldMatch(int, String) */ public MoreLikeThisQueryBuilder minimumShouldMatch(String minimumShouldMatch) { + if (minimumShouldMatch == null) { + throw new IllegalArgumentException("[" + NAME + "] requires minimum should match to be non-null"); + } this.minimumShouldMatch = minimumShouldMatch; return this; } + public String minimumShouldMatch() { + return minimumShouldMatch; + } + /** - * Sets the boost factor to use when boosting terms. Defaults to 1. + * Sets the boost factor to use when boosting terms. Defaults to 0 (deactivated). */ public MoreLikeThisQueryBuilder boostTerms(float boostTerms) { this.boostTerms = boostTerms; return this; } + public float boostTerms() { + return boostTerms; + } + /** * Whether to include the input documents. 
Defaults to false */ @@ -557,145 +652,363 @@ public class MoreLikeThisQueryBuilder extends QueryBuilder implements BoostableQ return this; } + public boolean include() { + return include; + } + /** * Whether to fail or return no result when this query is run against a field which is not supported such as binary/numeric fields. */ public MoreLikeThisQueryBuilder failOnUnsupportedField(boolean fail) { - failOnUnsupportedField = fail; + this.failOnUnsupportedField = fail; return this; } - @Override - public MoreLikeThisQueryBuilder boost(float boost) { - this.boost = boost; - return this; + public boolean failOnUnsupportedField() { + return failOnUnsupportedField; } /** - * Sets the query name for the filter that can be used when searching for matched_filters per hit. - */ - public MoreLikeThisQueryBuilder queryName(String queryName) { - this.queryName = queryName; - return this; - } - - /** - * The text to use in order to find documents that are "like" this. + * Converts an array of String ids to an Item[]. + * @param ids the ids to convert + * @return the new items array + * @deprecated construct the items array externally and use it in the constructor / setter */ @Deprecated - public MoreLikeThisQueryBuilder likeText(String likeText) { - return like(likeText); - } - - @Deprecated - public MoreLikeThisQueryBuilder ids(String... ids) { + public static Item[] ids(String... ids) { Item[] items = new Item[ids.length]; for (int i = 0; i < items.length; i++) { items[i] = new Item(null, null, ids[i]); } - return like(items); - } - - @Deprecated - public MoreLikeThisQueryBuilder docs(Item... docs) { - return like(docs); - } - - /** - * Sets the documents from which the terms should not be selected from. - * - * @Deprecated Use {@link #unlike(Item...)} instead - */ - @Deprecated - public MoreLikeThisQueryBuilder ignoreLike(Item... docs) { - return unlike(docs); - } - - /** - * Sets the text from which the terms should not be selected from.
- * - * @Deprecated Use {@link #unlike(String...)} instead. - */ - @Deprecated - public MoreLikeThisQueryBuilder ignoreLike(String... likeText) { - return unlike(likeText); - } - - /** - * Adds a document to use in order to find documents that are "like" this. - */ - @Deprecated - public MoreLikeThisQueryBuilder addItem(Item... likeItems) { - return addLikeItem(likeItems); + return items; } @Override protected void doXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(MoreLikeThisQueryParser.NAME); + builder.startObject(NAME); if (fields != null) { builder.field(MoreLikeThisQueryParser.Field.FIELDS.getPreferredName(), fields); } - if (this.likeTexts.isEmpty() && this.likeItems.isEmpty()) { - throw new IllegalArgumentException("more_like_this requires '" + MoreLikeThisQueryParser.Field.LIKE.getPreferredName() + "' to be provided"); - } else { - buildLikeField(builder, MoreLikeThisQueryParser.Field.LIKE.getPreferredName(), likeTexts, likeItems); - } - if (!unlikeTexts.isEmpty() || !unlikeItems.isEmpty()) { - buildLikeField(builder, MoreLikeThisQueryParser.Field.UNLIKE.getPreferredName(), unlikeTexts, unlikeItems); - } - if (maxQueryTerms != -1) { - builder.field(MoreLikeThisQueryParser.Field.MAX_QUERY_TERMS.getPreferredName(), maxQueryTerms); - } - if (minTermFreq != -1) { - builder.field(MoreLikeThisQueryParser.Field.MIN_TERM_FREQ.getPreferredName(), minTermFreq); - } - if (minDocFreq != -1) { - builder.field(MoreLikeThisQueryParser.Field.MIN_DOC_FREQ.getPreferredName(), minDocFreq); - } - if (maxDocFreq != -1) { - builder.field(MoreLikeThisQueryParser.Field.MAX_DOC_FREQ.getPreferredName(), maxDocFreq); - } - if (minWordLength != -1) { - builder.field(MoreLikeThisQueryParser.Field.MIN_WORD_LENGTH.getPreferredName(), minWordLength); - } - if (maxWordLength != -1) { - builder.field(MoreLikeThisQueryParser.Field.MAX_WORD_LENGTH.getPreferredName(), maxWordLength); - } - if (stopWords != null && stopWords.length > 0) { + 
buildLikeField(builder, MoreLikeThisQueryParser.Field.LIKE.getPreferredName(), likeTexts, likeItems); + buildLikeField(builder, MoreLikeThisQueryParser.Field.UNLIKE.getPreferredName(), unlikeTexts, unlikeItems); + builder.field(MoreLikeThisQueryParser.Field.MAX_QUERY_TERMS.getPreferredName(), maxQueryTerms); + builder.field(MoreLikeThisQueryParser.Field.MIN_TERM_FREQ.getPreferredName(), minTermFreq); + builder.field(MoreLikeThisQueryParser.Field.MIN_DOC_FREQ.getPreferredName(), minDocFreq); + builder.field(MoreLikeThisQueryParser.Field.MAX_DOC_FREQ.getPreferredName(), maxDocFreq); + builder.field(MoreLikeThisQueryParser.Field.MIN_WORD_LENGTH.getPreferredName(), minWordLength); + builder.field(MoreLikeThisQueryParser.Field.MAX_WORD_LENGTH.getPreferredName(), maxWordLength); + if (stopWords != null) { builder.field(MoreLikeThisQueryParser.Field.STOP_WORDS.getPreferredName(), stopWords); } if (analyzer != null) { builder.field(MoreLikeThisQueryParser.Field.ANALYZER.getPreferredName(), analyzer); } - if (minimumShouldMatch != null) { - builder.field(MoreLikeThisQueryParser.Field.MINIMUM_SHOULD_MATCH.getPreferredName(), minimumShouldMatch); - } - if (boostTerms != -1) { - builder.field(MoreLikeThisQueryParser.Field.BOOST_TERMS.getPreferredName(), boostTerms); - } - if (include != null) { - builder.field(MoreLikeThisQueryParser.Field.INCLUDE.getPreferredName(), include); - } - if (failOnUnsupportedField != null) { - builder.field(MoreLikeThisQueryParser.Field.FAIL_ON_UNSUPPORTED_FIELD.getPreferredName(), failOnUnsupportedField); - } - if (boost != -1) { - builder.field("boost", boost); - } - if (queryName != null) { - builder.field("_name", queryName); - } + builder.field(MoreLikeThisQueryParser.Field.MINIMUM_SHOULD_MATCH.getPreferredName(), minimumShouldMatch); + builder.field(MoreLikeThisQueryParser.Field.BOOST_TERMS.getPreferredName(), boostTerms); + builder.field(MoreLikeThisQueryParser.Field.INCLUDE.getPreferredName(), include); + 
builder.field(MoreLikeThisQueryParser.Field.FAIL_ON_UNSUPPORTED_FIELD.getPreferredName(), failOnUnsupportedField); + printBoostAndQueryName(builder); builder.endObject(); } - private static void buildLikeField(XContentBuilder builder, String fieldName, List texts, List items) throws IOException { - builder.startArray(fieldName); - for (String text : texts) { - builder.value(text); + private static void buildLikeField(XContentBuilder builder, String fieldName, String[] texts, Item[] items) throws IOException { + if (texts.length > 0 || items.length > 0) { + builder.startArray(fieldName); + for (String text : texts) { + builder.value(text); + } + for (Item item : items) { + builder.value(item); + } + builder.endArray(); } + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + protected Query doToQuery(QueryShardContext context) throws IOException { + MoreLikeThisQuery mltQuery = new MoreLikeThisQuery(); + + // set similarity + mltQuery.setSimilarity(context.searchSimilarity()); + + // set query parameters + mltQuery.setMaxQueryTerms(maxQueryTerms); + mltQuery.setMinTermFrequency(minTermFreq); + mltQuery.setMinDocFreq(minDocFreq); + mltQuery.setMaxDocFreq(maxDocFreq); + mltQuery.setMinWordLen(minWordLength); + mltQuery.setMaxWordLen(maxWordLength); + mltQuery.setMinimumShouldMatch(minimumShouldMatch); + if (stopWords != null) { + mltQuery.setStopWords(new HashSet<>(Arrays.asList(stopWords))); + } + + // sets boost terms + if (boostTerms != 0) { + mltQuery.setBoostTerms(true); + mltQuery.setBoostTermsFactor(boostTerms); + } + + // set analyzer + Analyzer analyzerObj = context.analysisService().analyzer(analyzer); + if (analyzerObj == null) { + analyzerObj = context.mapperService().searchAnalyzer(); + } + mltQuery.setAnalyzer(analyzerObj); + + // set like text fields + boolean useDefaultField = (fields == null); + List moreLikeFields = new ArrayList<>(); + if (useDefaultField) { + moreLikeFields = 
Collections.singletonList(context.defaultField()); + } else { + for (String field : fields) { + MappedFieldType fieldType = context.fieldMapper(field); + moreLikeFields.add(fieldType == null ? field : fieldType.names().indexName()); + } + } + + // possibly remove unsupported fields + removeUnsupportedFields(moreLikeFields, analyzerObj, failOnUnsupportedField); + if (moreLikeFields.isEmpty()) { + return null; + } + mltQuery.setMoreLikeFields(moreLikeFields.toArray(Strings.EMPTY_ARRAY)); + + // handle like texts + if (likeTexts.length > 0) { + mltQuery.setLikeText(likeTexts); + } + if (unlikeTexts.length > 0) { + mltQuery.setUnlikeText(unlikeTexts); + } + + // handle items + if (likeItems.length > 0) { + return handleItems(context, mltQuery, likeItems, unlikeItems, include, moreLikeFields, useDefaultField); + } else { + return mltQuery; + } + } + + private static List removeUnsupportedFields(List moreLikeFields, Analyzer analyzer, boolean failOnUnsupportedField) throws IOException { + for (Iterator it = moreLikeFields.iterator(); it.hasNext(); ) { + final String fieldName = it.next(); + if (!Analysis.generatesCharacterTokenStream(analyzer, fieldName)) { + if (failOnUnsupportedField) { + throw new IllegalArgumentException("more_like_this doesn't support binary/numeric fields: [" + fieldName + "]"); + } else { + it.remove(); + } + } + } + return moreLikeFields; + } + + private Query handleItems(QueryShardContext context, MoreLikeThisQuery mltQuery, Item[] likeItems, Item[] unlikeItems, + boolean include, List moreLikeFields, boolean useDefaultField) throws IOException { + // set default index, type and fields if not specified + for (Item item : likeItems) { + setDefaultIndexTypeFields(context, item, moreLikeFields, useDefaultField); + } + for (Item item : unlikeItems) { + setDefaultIndexTypeFields(context, item, moreLikeFields, useDefaultField); + } + + // fetching the items with multi-termvectors API + MultiTermVectorsResponse responses = 
fetchResponse(context.getClient(), likeItems, unlikeItems, SearchContext.current()); + + // getting the Fields for liked items + mltQuery.setLikeText(getFieldsFor(responses, likeItems)); + + // getting the Fields for unliked items + if (unlikeItems.length > 0) { + org.apache.lucene.index.Fields[] unlikeFields = getFieldsFor(responses, unlikeItems); + if (unlikeFields.length > 0) { + mltQuery.setUnlikeText(unlikeFields); + } + } + + BooleanQuery boolQuery = new BooleanQuery(); + boolQuery.add(mltQuery, BooleanClause.Occur.SHOULD); + + // exclude the items from the search + if (!include) { + handleExclude(boolQuery, likeItems); + } + return boolQuery; + } + + private static void setDefaultIndexTypeFields(QueryShardContext context, Item item, List moreLikeFields, + boolean useDefaultField) { + if (item.index() == null) { + item.index(context.index().name()); + } + if (item.type() == null) { + if (context.queryTypes().size() > 1) { + throw new QueryShardException(context, + "ambiguous type for item with id: " + item.id() + " and index: " + item.index()); + } else { + item.type(context.queryTypes().iterator().next()); + } + } + // default fields if not present but don't override for artificial docs + if ((item.fields() == null || item.fields().length == 0) && item.doc() == null) { + if (useDefaultField) { + item.fields("*"); + } else { + item.fields(moreLikeFields.toArray(new String[moreLikeFields.size()])); + } + } + } + + private MultiTermVectorsResponse fetchResponse(Client client, Item[] likeItems, @Nullable Item[] unlikeItems, + SearchContext searchContext) throws IOException { + MultiTermVectorsRequest request = new MultiTermVectorsRequest(); + for (Item item : likeItems) { + request.add(item.toTermVectorsRequest()); + } + for (Item item : unlikeItems) { + request.add(item.toTermVectorsRequest()); + } + request.copyContextAndHeadersFrom(searchContext); + return client.multiTermVectors(request).actionGet(); + } + + private static Fields[] 
getFieldsFor(MultiTermVectorsResponse responses, Item[] items) throws IOException { + List likeFields = new ArrayList<>(); + + Set selectedItems = new HashSet<>(); + for (Item request : items) { + selectedItems.add(new Item(request.index(), request.type(), request.id())); + } + + for (MultiTermVectorsItemResponse response : responses) { + if (!hasResponseFromRequest(response, selectedItems)) { + continue; + } + if (response.isFailed()) { + continue; + } + TermVectorsResponse getResponse = response.getResponse(); + if (!getResponse.isExists()) { + continue; + } + likeFields.add(getResponse.getFields()); + } + return likeFields.toArray(Fields.EMPTY_ARRAY); + } + + private static boolean hasResponseFromRequest(MultiTermVectorsItemResponse response, Set selectedItems) { + return selectedItems.contains(new Item(response.getIndex(), response.getType(), response.getId())); + } + + private static void handleExclude(BooleanQuery boolQuery, Item[] likeItems) { + // artificial docs get assigned a random id and should be disregarded + List uids = new ArrayList<>(); + for (Item item : likeItems) { + if (item.doc() != null) { + continue; + } + uids.add(createUidAsBytes(item.type(), item.id())); + } + if (!uids.isEmpty()) { + TermsQuery query = new TermsQuery(UidFieldMapper.NAME, uids.toArray(new BytesRef[0])); + boolQuery.add(query, BooleanClause.Occur.MUST_NOT); + } + } + + @Override + protected MoreLikeThisQueryBuilder doReadFrom(StreamInput in) throws IOException { + String[] fields = in.readOptionalStringArray(); + String[] likeTexts = in.readStringArray(); + Item[] likeItems = readItems(in); + MoreLikeThisQueryBuilder moreLikeThisQueryBuilder = new MoreLikeThisQueryBuilder(fields, likeTexts, likeItems); + moreLikeThisQueryBuilder.unlikeTexts = in.readStringArray(); + moreLikeThisQueryBuilder.unlikeItems = readItems(in); + moreLikeThisQueryBuilder.maxQueryTerms = in.readVInt(); + moreLikeThisQueryBuilder.minTermFreq = in.readVInt(); + moreLikeThisQueryBuilder.minDocFreq = 
in.readVInt(); + moreLikeThisQueryBuilder.maxDocFreq = in.readVInt(); + moreLikeThisQueryBuilder.minWordLength = in.readVInt(); + moreLikeThisQueryBuilder.maxWordLength = in.readVInt(); + moreLikeThisQueryBuilder.stopWords = in.readOptionalStringArray(); + moreLikeThisQueryBuilder.analyzer = in.readOptionalString(); + moreLikeThisQueryBuilder.minimumShouldMatch = in.readString(); + moreLikeThisQueryBuilder.boostTerms = (Float) in.readGenericValue(); + moreLikeThisQueryBuilder.include = in.readBoolean(); + moreLikeThisQueryBuilder.failOnUnsupportedField = in.readBoolean(); + return moreLikeThisQueryBuilder; + } + + private static Item[] readItems(StreamInput in) throws IOException { + int size = in.readVInt(); + Item[] items = new Item[size]; + for (int i = 0; i < size; i++) { + items[i] = Item.readItemFrom(in); + } + return items; + } + + @Override + protected void doWriteTo(StreamOutput out) throws IOException { + out.writeOptionalStringArray(fields); + out.writeStringArray(likeTexts); + writeItems(likeItems, out); + out.writeStringArray(unlikeTexts); + writeItems(unlikeItems, out); + out.writeVInt(maxQueryTerms); + out.writeVInt(minTermFreq); + out.writeVInt(minDocFreq); + out.writeVInt(maxDocFreq); + out.writeVInt(minWordLength); + out.writeVInt(maxWordLength); + out.writeOptionalStringArray(stopWords); + out.writeOptionalString(analyzer); + out.writeString(minimumShouldMatch); + out.writeGenericValue(boostTerms); + out.writeBoolean(include); + out.writeBoolean(failOnUnsupportedField); + } + + private static void writeItems(Item[] items, StreamOutput out) throws IOException { + out.writeVInt(items.length); for (Item item : items) { - builder.value(item); + item.writeTo(out); } - builder.endArray(); + } + + @Override + protected int doHashCode() { + return Objects.hash(Arrays.hashCode(fields), Arrays.hashCode(likeTexts), + Arrays.hashCode(unlikeTexts), Arrays.hashCode(likeItems), Arrays.hashCode(unlikeItems), + maxQueryTerms, minTermFreq, minDocFreq, maxDocFreq, 
minWordLength, maxWordLength, + Arrays.hashCode(stopWords), analyzer, minimumShouldMatch, boostTerms, include, failOnUnsupportedField); + } + + @Override + protected boolean doEquals(MoreLikeThisQueryBuilder other) { + return Arrays.equals(fields, other.fields) && + Arrays.equals(likeTexts, other.likeTexts) && + Arrays.equals(unlikeTexts, other.unlikeTexts) && + Arrays.equals(likeItems, other.likeItems) && + Arrays.equals(unlikeItems, other.unlikeItems) && + Objects.equals(maxQueryTerms, other.maxQueryTerms) && + Objects.equals(minTermFreq, other.minTermFreq) && + Objects.equals(minDocFreq, other.minDocFreq) && + Objects.equals(maxDocFreq, other.maxDocFreq) && + Objects.equals(minWordLength, other.minWordLength) && + Objects.equals(maxWordLength, other.maxWordLength) && + Arrays.equals(stopWords, other.stopWords) && // otherwise we are comparing pointers + Objects.equals(analyzer, other.analyzer) && + Objects.equals(minimumShouldMatch, other.minimumShouldMatch) && + Objects.equals(boostTerms, other.boostTerms) && + Objects.equals(include, other.include) && + Objects.equals(failOnUnsupportedField, other.failOnUnsupportedField); } } diff --git a/core/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryParser.java index 692d2f7008f..f7c1945313d 100644 --- a/core/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryParser.java @@ -19,47 +19,21 @@ package org.elasticsearch.index.query; -import org.apache.lucene.analysis.Analyzer; -import org.apache.lucene.queries.TermsQuery; -import org.apache.lucene.search.BooleanClause; -import org.apache.lucene.search.BooleanQuery; -import org.apache.lucene.search.Query; -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.action.termvectors.MultiTermVectorsResponse; -import org.elasticsearch.common.Nullable; import 
org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.lucene.search.MoreLikeThisQuery; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.analysis.Analysis; -import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.mapper.internal.UidFieldMapper; import org.elasticsearch.index.query.MoreLikeThisQueryBuilder.Item; -import org.elasticsearch.index.search.morelikethis.MoreLikeThisFetchService; -import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; import java.util.ArrayList; -import java.util.Collections; -import java.util.HashSet; -import java.util.Iterator; -import java.util.LinkedList; import java.util.List; -import java.util.Set; - -import static org.elasticsearch.index.mapper.Uid.createUidAsBytes; /** * Parser for the The More Like This Query (MLT Query) which finds documents that are "like" a given set of documents. * * The documents are provided as a set of strings and/or a list of {@link Item}. 
*/ -public class MoreLikeThisQueryParser implements QueryParser { - - public static final String NAME = "mlt"; - private MoreLikeThisFetchService fetchService = null; +public class MoreLikeThisQueryParser implements QueryParser { public interface Field { ParseField FIELDS = new ParseField("fields"); @@ -82,37 +56,40 @@ public class MoreLikeThisQueryParser implements QueryParser { ParseField FAIL_ON_UNSUPPORTED_FIELD = new ParseField("fail_on_unsupported_field"); } - public MoreLikeThisQueryParser() { - - } - - @Inject(optional = true) - public void setFetchService(@Nullable MoreLikeThisFetchService fetchService) { - this.fetchService = fetchService; - } - @Override public String[] names() { - return new String[]{NAME, "more_like_this", "moreLikeThis"}; + return new String[]{MoreLikeThisQueryBuilder.NAME, "more_like_this", "moreLikeThis"}; } @Override - public Query parse(QueryParseContext parseContext) throws IOException, ParsingException { + public MoreLikeThisQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException { XContentParser parser = parseContext.parser(); - MoreLikeThisQuery mltQuery = new MoreLikeThisQuery(); - mltQuery.setSimilarity(parseContext.searchSimilarity()); - + // document inputs + List fields = null; List likeTexts = new ArrayList<>(); List unlikeTexts = new ArrayList<>(); List likeItems = new ArrayList<>(); List unlikeItems = new ArrayList<>(); - List moreLikeFields = null; - Analyzer analyzer = null; - boolean include = false; + // term selection parameters + int maxQueryTerms = MoreLikeThisQueryBuilder.DEFAULT_MAX_QUERY_TERMS; + int minTermFreq = MoreLikeThisQueryBuilder.DEFAULT_MIN_TERM_FREQ; + int minDocFreq = MoreLikeThisQueryBuilder.DEFAULT_MIN_DOC_FREQ; + int maxDocFreq = MoreLikeThisQueryBuilder.DEFAULT_MAX_DOC_FREQ; + int minWordLength = MoreLikeThisQueryBuilder.DEFAULT_MIN_WORD_LENGTH; + int maxWordLength = MoreLikeThisQueryBuilder.DEFAULT_MAX_WORD_LENGTH; + List stopWords = null; + String analyzer = null; - 
boolean failOnUnsupportedField = true; + // query formation parameters + String minimumShouldMatch = MoreLikeThisQueryBuilder.DEFAULT_MINIMUM_SHOULD_MATCH; + float boostTerms = MoreLikeThisQueryBuilder.DEFAULT_BOOST_TERMS; + boolean include = MoreLikeThisQueryBuilder.DEFAULT_INCLUDE; + + // other parameters + boolean failOnUnsupportedField = MoreLikeThisQueryBuilder.DEFAULT_FAIL_ON_UNSUPPORTED_FIELDS; + float boost = AbstractQueryBuilder.DEFAULT_BOOST; String queryName = null; XContentParser.Token token; @@ -128,45 +105,39 @@ public class MoreLikeThisQueryParser implements QueryParser { } else if (parseContext.parseFieldMatcher().match(currentFieldName, Field.LIKE_TEXT)) { likeTexts.add(parser.text()); } else if (parseContext.parseFieldMatcher().match(currentFieldName, Field.MAX_QUERY_TERMS)) { - mltQuery.setMaxQueryTerms(parser.intValue()); + maxQueryTerms = parser.intValue(); } else if (parseContext.parseFieldMatcher().match(currentFieldName, Field.MIN_TERM_FREQ)) { - mltQuery.setMinTermFrequency(parser.intValue()); + minTermFreq =parser.intValue(); } else if (parseContext.parseFieldMatcher().match(currentFieldName, Field.MIN_DOC_FREQ)) { - mltQuery.setMinDocFreq(parser.intValue()); + minDocFreq = parser.intValue(); } else if (parseContext.parseFieldMatcher().match(currentFieldName, Field.MAX_DOC_FREQ)) { - mltQuery.setMaxDocFreq(parser.intValue()); + maxDocFreq = parser.intValue(); } else if (parseContext.parseFieldMatcher().match(currentFieldName, Field.MIN_WORD_LENGTH)) { - mltQuery.setMinWordLen(parser.intValue()); + minWordLength = parser.intValue(); } else if (parseContext.parseFieldMatcher().match(currentFieldName, Field.MAX_WORD_LENGTH)) { - mltQuery.setMaxWordLen(parser.intValue()); + maxWordLength = parser.intValue(); } else if (parseContext.parseFieldMatcher().match(currentFieldName, Field.ANALYZER)) { - analyzer = parseContext.analysisService().analyzer(parser.text()); + analyzer = parser.text(); } else if 
(parseContext.parseFieldMatcher().match(currentFieldName, Field.MINIMUM_SHOULD_MATCH)) { - mltQuery.setMinimumShouldMatch(parser.text()); + minimumShouldMatch = parser.text(); } else if (parseContext.parseFieldMatcher().match(currentFieldName, Field.BOOST_TERMS)) { - float boostFactor = parser.floatValue(); - if (boostFactor != 0) { - mltQuery.setBoostTerms(true); - mltQuery.setBoostTermsFactor(boostFactor); - } + boostTerms = parser.floatValue(); } else if (parseContext.parseFieldMatcher().match(currentFieldName, Field.INCLUDE)) { include = parser.booleanValue(); } else if (parseContext.parseFieldMatcher().match(currentFieldName, Field.FAIL_ON_UNSUPPORTED_FIELD)) { failOnUnsupportedField = parser.booleanValue(); } else if ("boost".equals(currentFieldName)) { - mltQuery.setBoost(parser.floatValue()); + boost = parser.floatValue(); } else if ("_name".equals(currentFieldName)) { queryName = parser.text(); } else { - throw new ParsingException(parseContext, "[mlt] query does not support [" + currentFieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), "[mlt] query does not support [" + currentFieldName + "]"); } } else if (token == XContentParser.Token.START_ARRAY) { if (parseContext.parseFieldMatcher().match(currentFieldName, Field.FIELDS)) { - moreLikeFields = new LinkedList<>(); + fields = new ArrayList<>(); while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { - String field = parser.text(); - MappedFieldType fieldType = parseContext.fieldMapper(field); - moreLikeFields.add(fieldType == null ? 
field : fieldType.names().indexName()); + fields.add(parser.text()); } } else if (parseContext.parseFieldMatcher().match(currentFieldName, Field.LIKE)) { while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { @@ -191,13 +162,12 @@ public class MoreLikeThisQueryParser implements QueryParser { likeItems.add(Item.parse(parser, parseContext.parseFieldMatcher(), new Item())); } } else if (parseContext.parseFieldMatcher().match(currentFieldName, Field.STOP_WORDS)) { - Set stopWords = new HashSet<>(); + stopWords = new ArrayList<>(); while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { stopWords.add(parser.text()); } - mltQuery.setStopWords(stopWords); } else { - throw new ParsingException(parseContext, "[mlt] query does not support [" + currentFieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), "[mlt] query does not support [" + currentFieldName + "]"); } } else if (token == XContentParser.Token.START_OBJECT) { if (parseContext.parseFieldMatcher().match(currentFieldName, Field.LIKE)) { @@ -205,56 +175,44 @@ public class MoreLikeThisQueryParser implements QueryParser { } else if (parseContext.parseFieldMatcher().match(currentFieldName, Field.UNLIKE)) { parseLikeField(parseContext, unlikeTexts, unlikeItems); } else { - throw new ParsingException(parseContext, "[mlt] query does not support [" + currentFieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), "[mlt] query does not support [" + currentFieldName + "]"); } } } if (likeTexts.isEmpty() && likeItems.isEmpty()) { - throw new ParsingException(parseContext, "more_like_this requires 'like' to be specified"); + throw new ParsingException(parser.getTokenLocation(), "more_like_this requires 'like' to be specified"); } - if (moreLikeFields != null && moreLikeFields.isEmpty()) { - throw new ParsingException(parseContext, "more_like_this requires 'fields' to be non-empty"); + if (fields != null && fields.isEmpty()) { + throw new 
ParsingException(parser.getTokenLocation(), "more_like_this requires 'fields' to be non-empty"); } - // set analyzer - if (analyzer == null) { - analyzer = parseContext.mapperService().searchAnalyzer(); - } - mltQuery.setAnalyzer(analyzer); + String[] fieldsArray = fields == null ? null : fields.toArray(new String[fields.size()]); + String[] likeTextsArray = likeTexts.isEmpty() ? null : likeTexts.toArray(new String[likeTexts.size()]); + String[] unlikeTextsArray = unlikeTexts.isEmpty() ? null : unlikeTexts.toArray(new String[unlikeTexts.size()]); + Item[] likeItemsArray = likeItems.isEmpty() ? null : likeItems.toArray(new Item[likeItems.size()]); + Item[] unlikeItemsArray = unlikeItems.isEmpty() ? null : unlikeItems.toArray(new Item[unlikeItems.size()]); - // set like text fields - boolean useDefaultField = (moreLikeFields == null); - if (useDefaultField) { - moreLikeFields = Collections.singletonList(parseContext.defaultField()); - } - - // possibly remove unsupported fields - removeUnsupportedFields(moreLikeFields, analyzer, failOnUnsupportedField); - if (moreLikeFields.isEmpty()) { - return null; - } - mltQuery.setMoreLikeFields(moreLikeFields.toArray(Strings.EMPTY_ARRAY)); - - // support for named query - if (queryName != null) { - parseContext.addNamedQuery(queryName, mltQuery); - } - - // handle like texts - if (!likeTexts.isEmpty()) { - mltQuery.setLikeText(likeTexts); - } - if (!unlikeTexts.isEmpty()) { - mltQuery.setUnlikeText(unlikeTexts); - } - - // handle items - if (!likeItems.isEmpty()) { - return handleItems(parseContext, mltQuery, likeItems, unlikeItems, include, moreLikeFields, useDefaultField); - } else { - return mltQuery; + MoreLikeThisQueryBuilder moreLikeThisQueryBuilder = new MoreLikeThisQueryBuilder(fieldsArray, likeTextsArray, likeItemsArray) + .unlike(unlikeTextsArray) + .unlike(unlikeItemsArray) + .maxQueryTerms(maxQueryTerms) + .minTermFreq(minTermFreq) + .minDocFreq(minDocFreq) + .maxDocFreq(maxDocFreq) + .minWordLength(minWordLength) + 
.maxWordLength(maxWordLength) + .analyzer(analyzer) + .minimumShouldMatch(minimumShouldMatch) + .boostTerms(boostTerms) + .include(include) + .failOnUnsupportedField(failOnUnsupportedField) + .boost(boost) + .queryName(queryName); + if (stopWords != null) { + moreLikeThisQueryBuilder.stopWords(stopWords); } + return moreLikeThisQueryBuilder; } private static void parseLikeField(QueryParseContext parseContext, List texts, List items) throws IOException { @@ -268,89 +226,8 @@ public class MoreLikeThisQueryParser implements QueryParser { } } - private static List removeUnsupportedFields(List moreLikeFields, Analyzer analyzer, boolean failOnUnsupportedField) throws IOException { - for (Iterator it = moreLikeFields.iterator(); it.hasNext(); ) { - final String fieldName = it.next(); - if (!Analysis.generatesCharacterTokenStream(analyzer, fieldName)) { - if (failOnUnsupportedField) { - throw new IllegalArgumentException("more_like_this doesn't support binary/numeric fields: [" + fieldName + "]"); - } else { - it.remove(); - } - } - } - return moreLikeFields; - } - - private Query handleItems(QueryParseContext parseContext, MoreLikeThisQuery mltQuery, List likeItems, List unlikeItems, - boolean include, List moreLikeFields, boolean useDefaultField) throws IOException { - // set default index, type and fields if not specified - for (Item item : likeItems) { - setDefaultIndexTypeFields(parseContext, item, moreLikeFields, useDefaultField); - } - for (Item item : unlikeItems) { - setDefaultIndexTypeFields(parseContext, item, moreLikeFields, useDefaultField); - } - - // fetching the items with multi-termvectors API - MultiTermVectorsResponse responses = fetchService.fetchResponse(likeItems, unlikeItems, SearchContext.current()); - - // getting the Fields for liked items - mltQuery.setLikeText(MoreLikeThisFetchService.getFieldsFor(responses, likeItems)); - - // getting the Fields for unliked items - if (!unlikeItems.isEmpty()) { - org.apache.lucene.index.Fields[] unlikeFields = 
MoreLikeThisFetchService.getFieldsFor(responses, unlikeItems); - if (unlikeFields.length > 0) { - mltQuery.setUnlikeText(unlikeFields); - } - } - - BooleanQuery boolQuery = new BooleanQuery(); - boolQuery.add(mltQuery, BooleanClause.Occur.SHOULD); - - // exclude the items from the search - if (!include) { - handleExclude(boolQuery, likeItems); - } - return boolQuery; - } - - private static void setDefaultIndexTypeFields(QueryParseContext parseContext, Item item, List moreLikeFields, - boolean useDefaultField) { - if (item.index() == null) { - item.index(parseContext.index().name()); - } - if (item.type() == null) { - if (parseContext.queryTypes().size() > 1) { - throw new ParsingException(parseContext, - "ambiguous type for item with id: " + item.id() + " and index: " + item.index()); - } else { - item.type(parseContext.queryTypes().iterator().next()); - } - } - // default fields if not present but don't override for artificial docs - if ((item.fields() == null || item.fields().length == 0) && item.doc() == null) { - if (useDefaultField) { - item.fields("*"); - } else { - item.fields(moreLikeFields.toArray(new String[moreLikeFields.size()])); - } - } - } - - private static void handleExclude(BooleanQuery boolQuery, List likeItems) { - // artificial docs get assigned a random id and should be disregarded - List uids = new ArrayList<>(); - for (Item item : likeItems) { - if (item.doc() != null) { - continue; - } - uids.add(createUidAsBytes(item.type(), item.id())); - } - if (!uids.isEmpty()) { - TermsQuery query = new TermsQuery(UidFieldMapper.NAME, uids.toArray(new BytesRef[0])); - boolQuery.add(query, BooleanClause.Occur.MUST_NOT); - } + @Override + public MoreLikeThisQueryBuilder getBuilderPrototype() { + return MoreLikeThisQueryBuilder.PROTOTYPE; } } diff --git a/core/src/main/java/org/elasticsearch/index/query/MultiMatchQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/MultiMatchQueryBuilder.java index d42f0c786c1..7c5b15297b0 100644 --- 
a/core/src/main/java/org/elasticsearch/index/query/MultiMatchQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/MultiMatchQueryBuilder.java @@ -19,64 +19,64 @@ package org.elasticsearch.index.query; -import com.carrotsearch.hppc.ObjectFloatHashMap; +import org.apache.lucene.search.FuzzyQuery; +import org.apache.lucene.search.Query; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParseFieldMatcher; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.query.support.QueryParsers; import org.elasticsearch.index.search.MatchQuery; +import org.elasticsearch.index.search.MultiMatchQuery; import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; import java.util.Locale; +import java.util.Map; +import java.util.Objects; +import java.util.TreeMap; /** * Same as {@link MatchQueryBuilder} but supports multiple fields. 
*/ -public class MultiMatchQueryBuilder extends QueryBuilder implements BoostableQueryBuilder { +public class MultiMatchQueryBuilder extends AbstractQueryBuilder { - private final Object text; + public static final String NAME = "multi_match"; - private final List fields; - private ObjectFloatHashMap fieldsBoosts; - - private MultiMatchQueryBuilder.Type type; - - private MatchQueryBuilder.Operator operator; + public static final MultiMatchQueryBuilder.Type DEFAULT_TYPE = MultiMatchQueryBuilder.Type.BEST_FIELDS; + public static final Operator DEFAULT_OPERATOR = Operator.OR; + public static final int DEFAULT_PHRASE_SLOP = MatchQuery.DEFAULT_PHRASE_SLOP; + public static final int DEFAULT_PREFIX_LENGTH = FuzzyQuery.defaultPrefixLength; + public static final int DEFAULT_MAX_EXPANSIONS = FuzzyQuery.defaultMaxExpansions; + public static final boolean DEFAULT_LENIENCY = MatchQuery.DEFAULT_LENIENCY; + public static final MatchQuery.ZeroTermsQuery DEFAULT_ZERO_TERMS_QUERY = MatchQuery.DEFAULT_ZERO_TERMS_QUERY; + private final Object value; + private final Map fieldsBoosts; + private MultiMatchQueryBuilder.Type type = DEFAULT_TYPE; + private Operator operator = DEFAULT_OPERATOR; private String analyzer; - - private Float boost; - - private Integer slop; - + private int slop = DEFAULT_PHRASE_SLOP; private Fuzziness fuzziness; - - private Integer prefixLength; - - private Integer maxExpansions; - + private int prefixLength = DEFAULT_PREFIX_LENGTH; + private int maxExpansions = DEFAULT_MAX_EXPANSIONS; private String minimumShouldMatch; - private String fuzzyRewrite = null; - private Boolean useDisMax; - private Float tieBreaker; - - private Boolean lenient; - + private boolean lenient = DEFAULT_LENIENCY; private Float cutoffFrequency = null; + private MatchQuery.ZeroTermsQuery zeroTermsQuery = DEFAULT_ZERO_TERMS_QUERY; - private MatchQueryBuilder.ZeroTermsQuery zeroTermsQuery = null; + static final MultiMatchQueryBuilder PROTOTYPE = new MultiMatchQueryBuilder(""); - private 
String queryName; - - - public enum Type { + public enum Type implements Writeable { /** * Uses the best matching boolean field as main score and uses @@ -109,6 +109,8 @@ public class MultiMatchQueryBuilder extends QueryBuilder implements BoostableQue */ PHRASE_PREFIX(MatchQuery.Type.PHRASE_PREFIX, 0.0f, new ParseField("phrase_prefix")); + private static final Type PROTOTYPE = BEST_FIELDS; + private MatchQuery.Type matchQueryType; private final float tieBreaker; private final ParseField parseField; @@ -141,12 +143,26 @@ public class MultiMatchQueryBuilder extends QueryBuilder implements BoostableQue } } if (type == null) { - throw new ElasticsearchParseException("failed to parse [{}] query type [{}]. unknown type.", MultiMatchQueryParser.NAME, value); + throw new ElasticsearchParseException("failed to parse [{}] query type [{}]. unknown type.", NAME, value); } return type; } + + @Override + public Type readFrom(StreamInput in) throws IOException { + return Type.values()[in.readVInt()]; + } + + public static Type readTypeFrom(StreamInput in) throws IOException { + return PROTOTYPE.readFrom(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeVInt(this.ordinal()); + } } - + /** * Returns the type (for testing) */ @@ -157,17 +173,32 @@ public class MultiMatchQueryBuilder extends QueryBuilder implements BoostableQue /** * Constructs a new text query. */ - public MultiMatchQueryBuilder(Object text, String... fields) { - this.fields = new ArrayList<>(); - this.fields.addAll(Arrays.asList(fields)); - this.text = text; + public MultiMatchQueryBuilder(Object value, String... 
fields) { + if (value == null) { + throw new IllegalArgumentException("[" + NAME + "] requires query value"); + } + if (fields == null) { + throw new IllegalArgumentException("[" + NAME + "] requires fields at initalization time"); + } + this.value = value; + this.fieldsBoosts = new TreeMap<>(); + for (String field : fields) { + field(field); + } + } + + public Object value() { + return value; } /** * Adds a field to run the multi match against. */ public MultiMatchQueryBuilder field(String field) { - fields.add(field); + if (Strings.isEmpty(field)) { + throw new IllegalArgumentException("supplied field is null or empty."); + } + this.fieldsBoosts.put(field, AbstractQueryBuilder.DEFAULT_BOOST); return this; } @@ -175,18 +206,32 @@ public class MultiMatchQueryBuilder extends QueryBuilder implements BoostableQue * Adds a field to run the multi match against with a specific boost. */ public MultiMatchQueryBuilder field(String field, float boost) { - fields.add(field); - if (fieldsBoosts == null) { - fieldsBoosts = new ObjectFloatHashMap<>(); + if (Strings.isEmpty(field)) { + throw new IllegalArgumentException("supplied field is null or empty."); } - fieldsBoosts.put(field, boost); + this.fieldsBoosts.put(field, boost); return this; } + /** + * Add several fields to run the query against with a specific boost. + */ + public MultiMatchQueryBuilder fields(Map fields) { + this.fieldsBoosts.putAll(fields); + return this; + } + + public Map fields() { + return fieldsBoosts; + } + /** * Sets the type of the text query. */ public MultiMatchQueryBuilder type(MultiMatchQueryBuilder.Type type) { + if (type == null) { + throw new IllegalArgumentException("[" + NAME + "] requires type to be non-null"); + } this.type = type; return this; } @@ -195,18 +240,32 @@ public class MultiMatchQueryBuilder extends QueryBuilder implements BoostableQue * Sets the type of the text query. */ public MultiMatchQueryBuilder type(Object type) { - this.type = type == null ? 
null : Type.parse(type.toString().toLowerCase(Locale.ROOT), ParseFieldMatcher.EMPTY); + if (type == null) { + throw new IllegalArgumentException("[" + NAME + "] requires type to be non-null"); + } + this.type = Type.parse(type.toString().toLowerCase(Locale.ROOT), ParseFieldMatcher.EMPTY); return this; } + public Type type() { + return type; + } + /** * Sets the operator to use when using a boolean query. Defaults to OR. */ - public MultiMatchQueryBuilder operator(MatchQueryBuilder.Operator operator) { + public MultiMatchQueryBuilder operator(Operator operator) { + if (operator == null) { + throw new IllegalArgumentException("[" + NAME + "] requires operator to be non-null"); + } this.operator = operator; return this; } + public Operator operator() { + return operator; + } + /** * Explicitly set the analyzer to use. Defaults to use explicit mapping config for the field, or, if not * set, the default search analyzer. @@ -216,65 +275,99 @@ public class MultiMatchQueryBuilder extends QueryBuilder implements BoostableQue return this; } - /** - * Set the boost to apply to the query. - */ - @Override - public MultiMatchQueryBuilder boost(float boost) { - this.boost = boost; - return this; + public String analyzer() { + return analyzer; } /** * Set the phrase slop if evaluated to a phrase query type. */ public MultiMatchQueryBuilder slop(int slop) { + if (slop < 0) { + throw new IllegalArgumentException("No negative slop allowed."); + } this.slop = slop; return this; } + public int slop() { + return slop; + } + /** * Sets the fuzziness used when evaluated to a fuzzy query type. Defaults to "AUTO". 
*/ public MultiMatchQueryBuilder fuzziness(Object fuzziness) { - this.fuzziness = Fuzziness.build(fuzziness); + if (fuzziness != null) { + this.fuzziness = Fuzziness.build(fuzziness); + } return this; } + public Fuzziness fuzziness() { + return fuzziness; + } + public MultiMatchQueryBuilder prefixLength(int prefixLength) { + if (prefixLength < 0) { + throw new IllegalArgumentException("No negative prefix length allowed."); + } this.prefixLength = prefixLength; return this; } + public int prefixLength() { + return prefixLength; + } + /** * When using fuzzy or prefix type query, the number of term expansions to use. Defaults to unbounded * so its recommended to set it to a reasonable value for faster execution. */ public MultiMatchQueryBuilder maxExpansions(int maxExpansions) { + if (maxExpansions <= 0) { + throw new IllegalArgumentException("Max expansions must be strictly great than zero."); + } this.maxExpansions = maxExpansions; return this; } + public int maxExpansions() { + return maxExpansions; + } + public MultiMatchQueryBuilder minimumShouldMatch(String minimumShouldMatch) { this.minimumShouldMatch = minimumShouldMatch; return this; } + public String minimumShouldMatch() { + return minimumShouldMatch; + } + public MultiMatchQueryBuilder fuzzyRewrite(String fuzzyRewrite) { this.fuzzyRewrite = fuzzyRewrite; return this; } + public String fuzzyRewrite() { + return fuzzyRewrite; + } + /** * @deprecated use a tieBreaker of 1.0f to disable "dis-max" * query or select the appropriate {@link Type} */ @Deprecated - public MultiMatchQueryBuilder useDisMax(boolean useDisMax) { + public MultiMatchQueryBuilder useDisMax(Boolean useDisMax) { this.useDisMax = useDisMax; return this; } + public Boolean useDisMax() { + return useDisMax; + } + /** *

Tie-Breaker for "best-match" disjunction queries (OR-Queries). * The tie breaker capability allows documents that match more than one query clause @@ -292,6 +385,27 @@ public class MultiMatchQueryBuilder extends QueryBuilder implements BoostableQue return this; } + /** + *

Tie-Breaker for "best-match" disjunction queries (OR-Queries). + * The tie breaker capability allows documents that match more than one query clause + * (in this case on more than one field) to be scored better than documents that + * match only the best of the fields, without confusing this with the better case of + * two distinct matches in the multiple fields.

+ * + *

A tie-breaker value of 1.0 is interpreted as a signal to score queries as + * "most-match" queries where all matching query clauses are considered for scoring.

+ * + * @see Type + */ + public MultiMatchQueryBuilder tieBreaker(Float tieBreaker) { + this.tieBreaker = tieBreaker; + return this; + } + + public Float tieBreaker() { + return tieBreaker; + } + /** * Sets whether format based failures will be ignored. */ @@ -300,6 +414,9 @@ public class MultiMatchQueryBuilder extends QueryBuilder implements BoostableQue return this; } + public boolean lenient() { + return lenient; + } /** * Set a cutoff value in [0..1] (or absolute number >=1) representing the @@ -311,91 +428,227 @@ public class MultiMatchQueryBuilder extends QueryBuilder implements BoostableQue return this; } + /** + * Set a cutoff value in [0..1] (or absolute number >=1) representing the + * maximum threshold of a terms document frequency to be considered a low + * frequency term. + */ + public MultiMatchQueryBuilder cutoffFrequency(Float cutoff) { + this.cutoffFrequency = cutoff; + return this; + } - public MultiMatchQueryBuilder zeroTermsQuery(MatchQueryBuilder.ZeroTermsQuery zeroTermsQuery) { + public Float cutoffFrequency() { + return cutoffFrequency; + } + + public MultiMatchQueryBuilder zeroTermsQuery(MatchQuery.ZeroTermsQuery zeroTermsQuery) { + if (zeroTermsQuery == null) { + throw new IllegalArgumentException("[" + NAME + "] requires zero terms query to be non-null"); + } this.zeroTermsQuery = zeroTermsQuery; return this; } - /** - * Sets the query name for the filter that can be used when searching for matched_filters per hit. 
- */ - public MultiMatchQueryBuilder queryName(String queryName) { - this.queryName = queryName; - return this; + public MatchQuery.ZeroTermsQuery zeroTermsQuery() { + return zeroTermsQuery; } @Override public void doXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(MultiMatchQueryParser.NAME); - - builder.field("query", text); + builder.startObject(NAME); + builder.field("query", value); builder.startArray("fields"); - for (String field : fields) { - final int keySlot; - if (fieldsBoosts != null && ((keySlot = fieldsBoosts.indexOf(field)) >= 0)) { - field += "^" + fieldsBoosts.indexGet(keySlot); - } - builder.value(field); + for (Map.Entry fieldEntry : this.fieldsBoosts.entrySet()) { + builder.value(fieldEntry.getKey() + "^" + fieldEntry.getValue()); } builder.endArray(); - - if (type != null) { - builder.field("type", type.toString().toLowerCase(Locale.ENGLISH)); - } - if (operator != null) { - builder.field("operator", operator.toString()); - } + builder.field("type", type.toString().toLowerCase(Locale.ENGLISH)); + builder.field("operator", operator.toString()); if (analyzer != null) { builder.field("analyzer", analyzer); } - if (boost != null) { - builder.field("boost", boost); - } - if (slop != null) { - builder.field("slop", slop); - } + builder.field("slop", slop); if (fuzziness != null) { fuzziness.toXContent(builder, params); } - if (prefixLength != null) { - builder.field("prefix_length", prefixLength); - } - if (maxExpansions != null) { - builder.field("max_expansions", maxExpansions); - } + builder.field("prefix_length", prefixLength); + builder.field("max_expansions", maxExpansions); if (minimumShouldMatch != null) { builder.field("minimum_should_match", minimumShouldMatch); } if (fuzzyRewrite != null) { builder.field("fuzzy_rewrite", fuzzyRewrite); } - if (useDisMax != null) { builder.field("use_dis_max", useDisMax); } - if (tieBreaker != null) { builder.field("tie_breaker", tieBreaker); } - - if (lenient != 
null) { - builder.field("lenient", lenient); - } - + builder.field("lenient", lenient); if (cutoffFrequency != null) { builder.field("cutoff_frequency", cutoffFrequency); } - - if (zeroTermsQuery != null) { - builder.field("zero_terms_query", zeroTermsQuery.toString()); - } - - if (queryName != null) { - builder.field("_name", queryName); - } - + builder.field("zero_terms_query", zeroTermsQuery.toString()); + printBoostAndQueryName(builder); builder.endObject(); } + @Override + public String getWriteableName() { + return NAME; + } + + @Override + protected Query doToQuery(QueryShardContext context) throws IOException { + MultiMatchQuery multiMatchQuery = new MultiMatchQuery(context); + if (analyzer != null) { + if (context.analysisService().analyzer(analyzer) == null) { + throw new QueryShardException(context, "[" + NAME + "] analyzer [" + analyzer + "] not found"); + } + multiMatchQuery.setAnalyzer(analyzer); + } + multiMatchQuery.setPhraseSlop(slop); + if (fuzziness != null) { + multiMatchQuery.setFuzziness(fuzziness); + } + multiMatchQuery.setFuzzyPrefixLength(prefixLength); + multiMatchQuery.setMaxExpansions(maxExpansions); + multiMatchQuery.setOccur(operator.toBooleanClauseOccur()); + if (fuzzyRewrite != null) { + multiMatchQuery.setFuzzyRewriteMethod(QueryParsers.parseRewriteMethod(context.parseFieldMatcher(), fuzzyRewrite, null)); + } + if (tieBreaker != null) { + multiMatchQuery.setTieBreaker(tieBreaker); + } + if (cutoffFrequency != null) { + multiMatchQuery.setCommonTermsCutoff(cutoffFrequency); + } + multiMatchQuery.setLenient(lenient); + multiMatchQuery.setZeroTermsQuery(zeroTermsQuery); + + if (useDisMax != null) { // backwards foobar + boolean typeUsesDismax = type.tieBreaker() != 1.0f; + if (typeUsesDismax != useDisMax) { + if (useDisMax && tieBreaker == null) { + multiMatchQuery.setTieBreaker(0.0f); + } else { + multiMatchQuery.setTieBreaker(1.0f); + } + } + } + + Map newFieldsBoosts = handleFieldsMatchPattern(context.mapperService(), fieldsBoosts); 
+ + Query query = multiMatchQuery.parse(type, newFieldsBoosts, value, minimumShouldMatch); + if (query == null) { + return null; + } + return query; + } + + @Override + protected void setFinalBoost(Query query) { + // we need to preserve the boost that came out of the parsing phase + query.setBoost(boost * query.getBoost()); + } + + private static Map handleFieldsMatchPattern(MapperService mapperService, Map fieldsBoosts) { + Map newFieldsBoosts = new TreeMap<>(); + for (Map.Entry fieldBoost : fieldsBoosts.entrySet()) { + String fField = fieldBoost.getKey(); + Float fBoost = fieldBoost.getValue(); + if (Regex.isSimpleMatchPattern(fField)) { + for (String field : mapperService.simpleMatchToIndexNames(fField)) { + newFieldsBoosts.put(field, fBoost); + } + } else { + newFieldsBoosts.put(fField, fBoost); + } + } + return newFieldsBoosts; + } + + @Override + protected MultiMatchQueryBuilder doReadFrom(StreamInput in) throws IOException { + MultiMatchQueryBuilder multiMatchQuery = new MultiMatchQueryBuilder(in.readGenericValue()); + int size = in.readVInt(); + for (int i = 0; i < size; i++) { + multiMatchQuery.fieldsBoosts.put(in.readString(), in.readFloat()); + } + multiMatchQuery.type = MultiMatchQueryBuilder.Type.readTypeFrom(in); + multiMatchQuery.operator = Operator.readOperatorFrom(in); + multiMatchQuery.analyzer = in.readOptionalString(); + multiMatchQuery.slop = in.readVInt(); + if (in.readBoolean()) { + multiMatchQuery.fuzziness = Fuzziness.readFuzzinessFrom(in); + } + multiMatchQuery.prefixLength = in.readVInt(); + multiMatchQuery.maxExpansions = in.readVInt(); + multiMatchQuery.minimumShouldMatch = in.readOptionalString(); + multiMatchQuery.fuzzyRewrite = in.readOptionalString(); + multiMatchQuery.useDisMax = in.readOptionalBoolean(); + multiMatchQuery.tieBreaker = (Float) in.readGenericValue(); + multiMatchQuery.lenient = in.readBoolean(); + multiMatchQuery.cutoffFrequency = (Float) in.readGenericValue(); + multiMatchQuery.zeroTermsQuery = 
MatchQuery.ZeroTermsQuery.readZeroTermsQueryFrom(in); + return multiMatchQuery; + } + + @Override + protected void doWriteTo(StreamOutput out) throws IOException { + out.writeGenericValue(value); + out.writeVInt(fieldsBoosts.size()); + for (Map.Entry fieldsEntry : fieldsBoosts.entrySet()) { + out.writeString(fieldsEntry.getKey()); + out.writeFloat(fieldsEntry.getValue()); + } + type.writeTo(out); + operator.writeTo(out); + out.writeOptionalString(analyzer); + out.writeVInt(slop); + if (fuzziness != null) { + out.writeBoolean(true); + fuzziness.writeTo(out); + } else { + out.writeBoolean(false); + } + out.writeVInt(prefixLength); + out.writeVInt(maxExpansions); + out.writeOptionalString(minimumShouldMatch); + out.writeOptionalString(fuzzyRewrite); + out.writeOptionalBoolean(useDisMax); + out.writeGenericValue(tieBreaker); + out.writeBoolean(lenient); + out.writeGenericValue(cutoffFrequency); + zeroTermsQuery.writeTo(out); + } + + @Override + protected int doHashCode() { + return Objects.hash(value, fieldsBoosts, type, operator, analyzer, slop, fuzziness, + prefixLength, maxExpansions, minimumShouldMatch, fuzzyRewrite, useDisMax, tieBreaker, lenient, + cutoffFrequency, zeroTermsQuery); + } + + @Override + protected boolean doEquals(MultiMatchQueryBuilder other) { + return Objects.equals(value, other.value) && + Objects.equals(fieldsBoosts, other.fieldsBoosts) && + Objects.equals(type, other.type) && + Objects.equals(operator, other.operator) && + Objects.equals(analyzer, other.analyzer) && + Objects.equals(slop, other.slop) && + Objects.equals(fuzziness, other.fuzziness) && + Objects.equals(prefixLength, other.prefixLength) && + Objects.equals(maxExpansions, other.maxExpansions) && + Objects.equals(minimumShouldMatch, other.minimumShouldMatch) && + Objects.equals(fuzzyRewrite, other.fuzzyRewrite) && + Objects.equals(useDisMax, other.useDisMax) && + Objects.equals(tieBreaker, other.tieBreaker) && + Objects.equals(lenient, other.lenient) && + 
Objects.equals(cutoffFrequency, other.cutoffFrequency) && + Objects.equals(zeroTermsQuery, other.zeroTermsQuery); + } } diff --git a/core/src/main/java/org/elasticsearch/index/query/MultiMatchQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/MultiMatchQueryParser.java index 6ac3e4c8384..c86d0295577 100644 --- a/core/src/main/java/org/elasticsearch/index/query/MultiMatchQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/MultiMatchQueryParser.java @@ -19,16 +19,10 @@ package org.elasticsearch.index.query; -import org.apache.lucene.search.BooleanClause; -import org.apache.lucene.search.Query; import org.elasticsearch.common.ParsingException; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.query.support.QueryParsers; import org.elasticsearch.index.search.MatchQuery; -import org.elasticsearch.index.search.MultiMatchQuery; import java.io.IOException; import java.util.HashMap; @@ -37,48 +31,53 @@ import java.util.Map; /** * Same as {@link MatchQueryParser} but has support for multiple fields. 
*/ -public class MultiMatchQueryParser implements QueryParser { - - public static final String NAME = "multi_match"; - - @Inject - public MultiMatchQueryParser() { - } +public class MultiMatchQueryParser implements QueryParser { @Override public String[] names() { return new String[]{ - NAME, "multiMatch" + MultiMatchQueryBuilder.NAME, "multiMatch" }; } @Override - public Query parse(QueryParseContext parseContext) throws IOException, ParsingException { + public MultiMatchQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException { XContentParser parser = parseContext.parser(); Object value = null; - float boost = 1.0f; - Float tieBreaker = null; - MultiMatchQueryBuilder.Type type = null; - MultiMatchQuery multiMatchQuery = new MultiMatchQuery(parseContext); + Map fieldsBoosts = new HashMap<>(); + MultiMatchQueryBuilder.Type type = MultiMatchQueryBuilder.DEFAULT_TYPE; + String analyzer = null; + int slop = MultiMatchQueryBuilder.DEFAULT_PHRASE_SLOP; + Fuzziness fuzziness = null; + int prefixLength = MultiMatchQueryBuilder.DEFAULT_PREFIX_LENGTH; + int maxExpansions = MultiMatchQueryBuilder.DEFAULT_MAX_EXPANSIONS; + Operator operator = MultiMatchQueryBuilder.DEFAULT_OPERATOR; String minimumShouldMatch = null; - Map fieldNameWithBoosts = new HashMap<>(); + String fuzzyRewrite = null; + Boolean useDisMax = null; + Float tieBreaker = null; + Float cutoffFrequency = null; + boolean lenient = MultiMatchQueryBuilder.DEFAULT_LENIENCY; + MatchQuery.ZeroTermsQuery zeroTermsQuery = MultiMatchQueryBuilder.DEFAULT_ZERO_TERMS_QUERY; + + float boost = AbstractQueryBuilder.DEFAULT_BOOST; String queryName = null; + XContentParser.Token token; String currentFieldName = null; - Boolean useDisMax = null; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); } else if ("fields".equals(currentFieldName)) { if (token == XContentParser.Token.START_ARRAY) { while 
((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { - extractFieldAndBoost(parseContext, parser, fieldNameWithBoosts); + parseFieldAndBoost(parser, fieldsBoosts); } } else if (token.isValue()) { - extractFieldAndBoost(parseContext, parser, fieldNameWithBoosts); + parseFieldAndBoost(parser, fieldsBoosts); } else { - throw new ParsingException(parseContext, "[" + NAME + "] query does not support [" + currentFieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), "[" + MultiMatchQueryBuilder.NAME + "] query does not support [" + currentFieldName + "]"); } } else if (token.isValue()) { if ("query".equals(currentFieldName)) { @@ -86,95 +85,79 @@ public class MultiMatchQueryParser implements QueryParser { } else if ("type".equals(currentFieldName)) { type = MultiMatchQueryBuilder.Type.parse(parser.text(), parseContext.parseFieldMatcher()); } else if ("analyzer".equals(currentFieldName)) { - String analyzer = parser.text(); - if (parseContext.analysisService().analyzer(analyzer) == null) { - throw new ParsingException(parseContext, "[" + NAME + "] analyzer [" + parser.text() + "] not found"); - } - multiMatchQuery.setAnalyzer(analyzer); + analyzer = parser.text(); } else if ("boost".equals(currentFieldName)) { boost = parser.floatValue(); } else if ("slop".equals(currentFieldName) || "phrase_slop".equals(currentFieldName) || "phraseSlop".equals(currentFieldName)) { - multiMatchQuery.setPhraseSlop(parser.intValue()); + slop = parser.intValue(); } else if (parseContext.parseFieldMatcher().match(currentFieldName, Fuzziness.FIELD)) { - multiMatchQuery.setFuzziness(Fuzziness.parse(parser)); + fuzziness = Fuzziness.parse(parser); } else if ("prefix_length".equals(currentFieldName) || "prefixLength".equals(currentFieldName)) { - multiMatchQuery.setFuzzyPrefixLength(parser.intValue()); + prefixLength = parser.intValue(); } else if ("max_expansions".equals(currentFieldName) || "maxExpansions".equals(currentFieldName)) { - 
multiMatchQuery.setMaxExpansions(parser.intValue()); + maxExpansions = parser.intValue(); } else if ("operator".equals(currentFieldName)) { - String op = parser.text(); - if ("or".equalsIgnoreCase(op)) { - multiMatchQuery.setOccur(BooleanClause.Occur.SHOULD); - } else if ("and".equalsIgnoreCase(op)) { - multiMatchQuery.setOccur(BooleanClause.Occur.MUST); - } else { - throw new ParsingException(parseContext, "text query requires operator to be either 'and' or 'or', not [" + op - + "]"); - } + operator = Operator.fromString(parser.text()); } else if ("minimum_should_match".equals(currentFieldName) || "minimumShouldMatch".equals(currentFieldName)) { minimumShouldMatch = parser.textOrNull(); } else if ("fuzzy_rewrite".equals(currentFieldName) || "fuzzyRewrite".equals(currentFieldName)) { - multiMatchQuery.setFuzzyRewriteMethod(QueryParsers.parseRewriteMethod(parseContext.parseFieldMatcher(), parser.textOrNull(), null)); + fuzzyRewrite = parser.textOrNull(); } else if ("use_dis_max".equals(currentFieldName) || "useDisMax".equals(currentFieldName)) { useDisMax = parser.booleanValue(); } else if ("tie_breaker".equals(currentFieldName) || "tieBreaker".equals(currentFieldName)) { - multiMatchQuery.setTieBreaker(tieBreaker = parser.floatValue()); + tieBreaker = parser.floatValue(); } else if ("cutoff_frequency".equals(currentFieldName)) { - multiMatchQuery.setCommonTermsCutoff(parser.floatValue()); + cutoffFrequency = parser.floatValue(); } else if ("lenient".equals(currentFieldName)) { - multiMatchQuery.setLenient(parser.booleanValue()); + lenient = parser.booleanValue(); } else if ("zero_terms_query".equals(currentFieldName)) { String zeroTermsDocs = parser.text(); if ("none".equalsIgnoreCase(zeroTermsDocs)) { - multiMatchQuery.setZeroTermsQuery(MatchQuery.ZeroTermsQuery.NONE); + zeroTermsQuery = MatchQuery.ZeroTermsQuery.NONE; } else if ("all".equalsIgnoreCase(zeroTermsDocs)) { - multiMatchQuery.setZeroTermsQuery(MatchQuery.ZeroTermsQuery.ALL); + zeroTermsQuery = 
MatchQuery.ZeroTermsQuery.ALL; } else { - throw new ParsingException(parseContext, "Unsupported zero_terms_docs value [" + zeroTermsDocs + "]"); + throw new ParsingException(parser.getTokenLocation(), "Unsupported zero_terms_docs value [" + zeroTermsDocs + "]"); } } else if ("_name".equals(currentFieldName)) { queryName = parser.text(); } else { - throw new ParsingException(parseContext, "[match] query does not support [" + currentFieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), "[match] query does not support [" + currentFieldName + "]"); } } } if (value == null) { - throw new ParsingException(parseContext, "No text specified for multi_match query"); + throw new ParsingException(parser.getTokenLocation(), "No text specified for multi_match query"); } - if (fieldNameWithBoosts.isEmpty()) { - throw new ParsingException(parseContext, "No fields specified for multi_match query"); - } - if (type == null) { - type = MultiMatchQueryBuilder.Type.BEST_FIELDS; - } - if (useDisMax != null) { // backwards foobar - boolean typeUsesDismax = type.tieBreaker() != 1.0f; - if (typeUsesDismax != useDisMax) { - if (useDisMax && tieBreaker == null) { - multiMatchQuery.setTieBreaker(0.0f); - } else { - multiMatchQuery.setTieBreaker(1.0f); - } - } - } - Query query = multiMatchQuery.parse(type, fieldNameWithBoosts, value, minimumShouldMatch); - if (query == null) { - return null; + if (fieldsBoosts.isEmpty()) { + throw new ParsingException(parser.getTokenLocation(), "No fields specified for multi_match query"); } - query.setBoost(boost); - if (queryName != null) { - parseContext.addNamedQuery(queryName, query); - } - return query; + return new MultiMatchQueryBuilder(value) + .fields(fieldsBoosts) + .type(type) + .analyzer(analyzer) + .cutoffFrequency(cutoffFrequency) + .fuzziness(fuzziness) + .fuzzyRewrite(fuzzyRewrite) + .useDisMax(useDisMax) + .lenient(lenient) + .maxExpansions(maxExpansions) + .minimumShouldMatch(minimumShouldMatch) + .operator(operator) + 
.prefixLength(prefixLength) + .slop(slop) + .tieBreaker(tieBreaker) + .zeroTermsQuery(zeroTermsQuery) + .boost(boost) + .queryName(queryName); } - private void extractFieldAndBoost(QueryParseContext parseContext, XContentParser parser, Map fieldNameWithBoosts) throws IOException { + private void parseFieldAndBoost(XContentParser parser, Map fieldsBoosts) throws IOException { String fField = null; - Float fBoost = null; + Float fBoost = AbstractQueryBuilder.DEFAULT_BOOST; char[] fieldText = parser.textCharacters(); int end = parser.textOffset() + parser.textLength(); for (int i = parser.textOffset(); i < end; i++) { @@ -188,13 +171,11 @@ public class MultiMatchQueryParser implements QueryParser { if (fField == null) { fField = parser.text(); } + fieldsBoosts.put(fField, fBoost); + } - if (Regex.isSimpleMatchPattern(fField)) { - for (String field : parseContext.mapperService().simpleMatchToIndexNames(fField)) { - fieldNameWithBoosts.put(field, fBoost); - } - } else { - fieldNameWithBoosts.put(fField, fBoost); - } + @Override + public MultiMatchQueryBuilder getBuilderPrototype() { + return MultiMatchQueryBuilder.PROTOTYPE; } } diff --git a/core/src/main/java/org/elasticsearch/index/query/MultiTermQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/MultiTermQueryBuilder.java index 9c7383dc251..0e946d628a1 100644 --- a/core/src/main/java/org/elasticsearch/index/query/MultiTermQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/MultiTermQueryBuilder.java @@ -18,6 +18,6 @@ */ package org.elasticsearch.index.query; -public abstract class MultiTermQueryBuilder extends QueryBuilder { +public interface MultiTermQueryBuilder> extends QueryBuilder { } diff --git a/core/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java index 63b40dcaef1..e012c52b944 100644 --- a/core/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java +++ 
b/core/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java @@ -19,85 +19,211 @@ package org.elasticsearch.index.query; +import org.apache.lucene.search.Filter; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.join.BitSetProducer; +import org.apache.lucene.search.join.ScoreMode; +import org.apache.lucene.search.join.ToParentBlockJoinQuery; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.index.query.support.QueryInnerHitBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.mapper.object.ObjectMapper; +import org.elasticsearch.index.query.support.QueryInnerHits; +import org.elasticsearch.search.fetch.innerhits.InnerHitsContext; +import org.elasticsearch.search.fetch.innerhits.InnerHitsSubSearchContext; import java.io.IOException; +import java.util.Locale; import java.util.Objects; -public class NestedQueryBuilder extends QueryBuilder implements BoostableQueryBuilder { +public class NestedQueryBuilder extends AbstractQueryBuilder { - private final QueryBuilder queryBuilder; + /** + * The default score mode for nested queries.
+ */ + public static final ScoreMode DEFAULT_SCORE_MODE = ScoreMode.Avg; + + /** + * The queries name used while parsing + */ + public static final String NAME = "nested"; + + private final QueryBuilder query; private final String path; - private String scoreMode; + private ScoreMode scoreMode = DEFAULT_SCORE_MODE; - private float boost = 1.0f; + private QueryInnerHits queryInnerHits; - private String queryName; - - private QueryInnerHitBuilder innerHit; - - public NestedQueryBuilder(String path, QueryBuilder queryBuilder) { + public NestedQueryBuilder(String path, QueryBuilder query) { + if (path == null) { + throw new IllegalArgumentException("[" + NAME + "] requires 'path' field"); + } + if (query == null) { + throw new IllegalArgumentException("[" + NAME + "] requires 'query' field"); + } this.path = path; - this.queryBuilder = Objects.requireNonNull(queryBuilder); + this.query = query; } + + public NestedQueryBuilder(String path, QueryBuilder query, ScoreMode scoreMode, QueryInnerHits queryInnerHits) { + this(path, query); + scoreMode(scoreMode); + this.queryInnerHits = queryInnerHits; + } + /** - * The score mode. + * The score mode how the scores from the matching child documents are mapped into the nested parent document. */ - public NestedQueryBuilder scoreMode(String scoreMode) { + public NestedQueryBuilder scoreMode(ScoreMode scoreMode) { + if (scoreMode == null) { + throw new IllegalArgumentException("[" + NAME + "] requires 'score_mode' field"); + } this.scoreMode = scoreMode; return this; } - /** - * Sets the boost for this query. Documents matching this query will (in addition to the normal - * weightings) have their score multiplied by the boost provided. - */ - @Override - public NestedQueryBuilder boost(float boost) { - this.boost = boost; - return this; - } - - /** - * Sets the query name for the filter that can be used when searching for matched_filters per hit. 
- */ - public NestedQueryBuilder queryName(String queryName) { - this.queryName = queryName; - return this; - } - /** * Sets inner hit definition in the scope of this nested query and reusing the defined path and query. */ - public NestedQueryBuilder innerHit(QueryInnerHitBuilder innerHit) { - this.innerHit = innerHit; + public NestedQueryBuilder innerHit(QueryInnerHits innerHit) { + this.queryInnerHits = innerHit; return this; } + /** + * Returns the nested query to execute. + */ + public QueryBuilder query() { + return query; + } + + /** + * Returns inner hit definition in the scope of this query and reusing the defined type and query. + */ + public QueryInnerHits innerHit() { + return queryInnerHits; + } + + /** + * Returns how the scores from the matching child documents are mapped into the nested parent document. + */ + public ScoreMode scoreMode() { + return scoreMode; + } + @Override protected void doXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(NestedQueryParser.NAME); + builder.startObject(NAME); builder.field("query"); - queryBuilder.toXContent(builder, params); + query.toXContent(builder, params); builder.field("path", path); if (scoreMode != null) { - builder.field("score_mode", scoreMode); + builder.field("score_mode", scoreMode.name().toLowerCase(Locale.ROOT)); } - if (boost != 1.0f) { - builder.field("boost", boost); - } - if (queryName != null) { - builder.field("_name", queryName); - } - if (innerHit != null) { - builder.startObject("inner_hits"); - builder.value(innerHit); - builder.endObject(); + printBoostAndQueryName(builder); + if (queryInnerHits != null) { + queryInnerHits.toXContent(builder, params); } builder.endObject(); } + @Override + public final String getWriteableName() { + return NAME; + } + + @Override + protected boolean doEquals(NestedQueryBuilder that) { + return Objects.equals(query, that.query) + && Objects.equals(path, that.path) + && Objects.equals(scoreMode, that.scoreMode) + && 
Objects.equals(queryInnerHits, that.queryInnerHits); + } + + @Override + protected int doHashCode() { + return Objects.hash(query, path, scoreMode, queryInnerHits); + } + + private NestedQueryBuilder(StreamInput in) throws IOException { + path = in.readString(); + final int ordinal = in.readVInt(); + scoreMode = ScoreMode.values()[ordinal]; + query = in.readQuery(); + if (in.readBoolean()) { + queryInnerHits = new QueryInnerHits(in); + } + } + + @Override + protected void doWriteTo(StreamOutput out) throws IOException { + out.writeString(path); + out.writeVInt(scoreMode.ordinal()); + out.writeQuery(query); + if (queryInnerHits != null) { + out.writeBoolean(true); + queryInnerHits.writeTo(out); + } else { + out.writeBoolean(false); + } + } + + @Override + protected NestedQueryBuilder doReadFrom(StreamInput in) throws IOException { + return new NestedQueryBuilder(in); + } + + @Override + protected Query doToQuery(QueryShardContext context) throws IOException { + ObjectMapper nestedObjectMapper = context.getObjectMapper(path); + if (nestedObjectMapper == null) { + throw new IllegalStateException("[" + NAME + "] failed to find nested object under path [" + path + "]"); + } + if (!nestedObjectMapper.nested().isNested()) { + throw new IllegalStateException("[" + NAME + "] nested object under path [" + path + "] is not of nested type"); + } + final BitSetProducer parentFilter; + final Filter childFilter; + final ObjectMapper parentObjectMapper; + final Query innerQuery; + ObjectMapper objectMapper = context.nestedScope().getObjectMapper(); + try { + if (objectMapper == null) { + parentFilter = context.bitsetFilter(Queries.newNonNestedFilter()); + } else { + parentFilter = context.bitsetFilter(objectMapper.nestedTypeFilter()); + } + childFilter = nestedObjectMapper.nestedTypeFilter(); + parentObjectMapper = context.nestedScope().nextLevel(nestedObjectMapper); + innerQuery = this.query.toQuery(context); + if (innerQuery == null) { + return null; + } + } finally { + 
context.nestedScope().previousLevel(); + } + + if (queryInnerHits != null) { + try (XContentParser parser = queryInnerHits.getXcontentParser()) { + XContentParser.Token token = parser.nextToken(); + if (token != XContentParser.Token.START_OBJECT) { + throw new IllegalStateException("start object expected but was: [" + token + "]"); + } + InnerHitsSubSearchContext innerHits = context.indexQueryParserService().getInnerHitsQueryParserHelper().parse(parser); + if (innerHits != null) { + ParsedQuery parsedQuery = new ParsedQuery(innerQuery, context.copyNamedQueries()); + + InnerHitsContext.NestedInnerHits nestedInnerHits = new InnerHitsContext.NestedInnerHits(innerHits.getSubSearchContext(), parsedQuery, null, parentObjectMapper, nestedObjectMapper); + String name = innerHits.getName() != null ? innerHits.getName() : path; + context.addInnerHits(name, nestedInnerHits); + } + } + } + return new ToParentBlockJoinQuery(Queries.filtered(innerQuery, childFilter), parentFilter, scoreMode); + } + } diff --git a/core/src/main/java/org/elasticsearch/index/query/NestedQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/NestedQueryParser.java index 5709d9bffbf..1fabfede29d 100644 --- a/core/src/main/java/org/elasticsearch/index/query/NestedQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/NestedQueryParser.java @@ -19,68 +19,51 @@ package org.elasticsearch.index.query; -import org.apache.lucene.search.ConstantScoreQuery; -import org.apache.lucene.search.Query; import org.apache.lucene.search.join.ScoreMode; -import org.apache.lucene.search.join.ToParentBlockJoinQuery; -import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.xcontent.XContentParser; -import 
org.elasticsearch.index.query.support.InnerHitsQueryParserHelper; -import org.elasticsearch.index.query.support.NestedInnerQueryParseSupport; -import org.elasticsearch.search.fetch.innerhits.InnerHitsContext; -import org.elasticsearch.search.fetch.innerhits.InnerHitsSubSearchContext; - +import org.elasticsearch.index.query.support.QueryInnerHits; import java.io.IOException; -public class NestedQueryParser implements QueryParser { +public class NestedQueryParser implements QueryParser { - public static final String NAME = "nested"; private static final ParseField FILTER_FIELD = new ParseField("filter").withAllDeprecated("query"); - - private final InnerHitsQueryParserHelper innerHitsQueryParserHelper; - - @Inject - public NestedQueryParser(InnerHitsQueryParserHelper innerHitsQueryParserHelper) { - this.innerHitsQueryParserHelper = innerHitsQueryParserHelper; - } + private static final NestedQueryBuilder PROTOTYPE = new NestedQueryBuilder("", EmptyQueryBuilder.PROTOTYPE); @Override public String[] names() { - return new String[]{NAME, Strings.toCamelCase(NAME)}; + return new String[]{NestedQueryBuilder.NAME, Strings.toCamelCase(NestedQueryBuilder.NAME)}; } @Override - public Query parse(QueryParseContext parseContext) throws IOException, ParsingException { + public NestedQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException { XContentParser parser = parseContext.parser(); - final ToBlockJoinQueryBuilder builder = new ToBlockJoinQueryBuilder(parseContext); - - float boost = 1.0f; - ScoreMode scoreMode = ScoreMode.Avg; + float boost = AbstractQueryBuilder.DEFAULT_BOOST; + ScoreMode scoreMode = NestedQueryBuilder.DEFAULT_SCORE_MODE; String queryName = null; - + QueryBuilder query = null; + String path = null; String currentFieldName = null; + QueryInnerHits queryInnerHits = null; XContentParser.Token token; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = 
parser.currentName(); } else if (token == XContentParser.Token.START_OBJECT) { if ("query".equals(currentFieldName)) { - builder.query(); + query = parseContext.parseInnerQueryBuilder(); } else if (parseContext.parseFieldMatcher().match(currentFieldName, FILTER_FIELD)) { - builder.filter(); + query = parseContext.parseInnerQueryBuilder(); } else if ("inner_hits".equals(currentFieldName)) { - builder.setInnerHits(innerHitsQueryParserHelper.parse(parseContext)); + queryInnerHits = new QueryInnerHits(parser); } else { - throw new ParsingException(parseContext, "[nested] query does not support [" + currentFieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), "[nested] query does not support [" + currentFieldName + "]"); } } else if (token.isValue()) { if ("path".equals(currentFieldName)) { - builder.setPath(parser.text()); + path = parser.text(); } else if ("boost".equals(currentFieldName)) { boost = parser.floatValue(); } else if ("score_mode".equals(currentFieldName) || "scoreMode".equals(currentFieldName)) { @@ -96,73 +79,20 @@ public class NestedQueryParser implements QueryParser { } else if ("none".equals(sScoreMode)) { scoreMode = ScoreMode.None; } else { - throw new ParsingException(parseContext, "illegal score_mode for nested query [" + sScoreMode + "]"); + throw new ParsingException(parser.getTokenLocation(), "illegal score_mode for nested query [" + sScoreMode + "]"); } } else if ("_name".equals(currentFieldName)) { queryName = parser.text(); } else { - throw new ParsingException(parseContext, "[nested] query does not support [" + currentFieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), "[nested] query does not support [" + currentFieldName + "]"); } } } - - builder.setScoreMode(scoreMode); - ToParentBlockJoinQuery joinQuery = builder.build(); - if (joinQuery != null) { - joinQuery.setBoost(boost); - if (queryName != null) { - parseContext.addNamedQuery(queryName, joinQuery); - } - } - return joinQuery; + return new 
NestedQueryBuilder(path, query, scoreMode, queryInnerHits).queryName(queryName).boost(boost); } - public static class ToBlockJoinQueryBuilder extends NestedInnerQueryParseSupport { - - private ScoreMode scoreMode; - private InnerHitsSubSearchContext innerHits; - - public ToBlockJoinQueryBuilder(QueryParseContext parseContext) throws IOException { - super(parseContext); - } - - public void setScoreMode(ScoreMode scoreMode) { - this.scoreMode = scoreMode; - } - - public void setInnerHits(InnerHitsSubSearchContext innerHits) { - this.innerHits = innerHits; - } - - @Nullable - public ToParentBlockJoinQuery build() throws IOException { - Query innerQuery; - if (queryFound) { - innerQuery = getInnerQuery(); - } else if (filterFound) { - Query innerFilter = getInnerFilter(); - if (innerFilter != null) { - innerQuery = new ConstantScoreQuery(getInnerFilter()); - } else { - innerQuery = null; - } - } else { - throw new ParsingException(parseContext, "[nested] requires either 'query' or 'filter' field"); - } - - if (innerHits != null) { - ParsedQuery parsedQuery = new ParsedQuery(innerQuery, parseContext.copyNamedQueries()); - InnerHitsContext.NestedInnerHits nestedInnerHits = new InnerHitsContext.NestedInnerHits(innerHits.getSubSearchContext(), parsedQuery, null, getParentObjectMapper(), nestedObjectMapper); - String name = innerHits.getName() != null ? 
innerHits.getName() : path; - parseContext.addInnerHits(name, nestedInnerHits); - } - - if (innerQuery != null) { - return new ToParentBlockJoinQuery(Queries.filtered(innerQuery, childFilter), parentFilter, scoreMode); - } else { - return null; - } - } - + @Override + public NestedQueryBuilder getBuilderPrototype() { + return PROTOTYPE; } } diff --git a/core/src/main/java/org/elasticsearch/index/query/NotQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/NotQueryBuilder.java index c16cf6450ea..72b70a7d126 100644 --- a/core/src/main/java/org/elasticsearch/index/query/NotQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/NotQueryBuilder.java @@ -19,6 +19,10 @@ package org.elasticsearch.index.query; +import org.apache.lucene.search.Query; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.xcontent.XContentBuilder; import java.io.IOException; @@ -27,29 +31,69 @@ import java.util.Objects; /** * A filter that matches documents matching boolean combinations of other filters. */ -public class NotQueryBuilder extends QueryBuilder { +public class NotQueryBuilder extends AbstractQueryBuilder { + + public static final String NAME = "not"; private final QueryBuilder filter; - private String queryName; + static final NotQueryBuilder PROTOTYPE = new NotQueryBuilder(EmptyQueryBuilder.PROTOTYPE); public NotQueryBuilder(QueryBuilder filter) { - this.filter = Objects.requireNonNull(filter); + if (filter == null) { + throw new IllegalArgumentException("inner filter cannot be null"); + } + this.filter = filter; } - public NotQueryBuilder queryName(String queryName) { - this.queryName = queryName; - return this; + /** + * @return the query added to "not". 
+ */ + public QueryBuilder innerQuery() { + return this.filter; } @Override protected void doXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(NotQueryParser.NAME); + builder.startObject(NAME); builder.field("query"); filter.toXContent(builder, params); - if (queryName != null) { - builder.field("_name", queryName); - } + printBoostAndQueryName(builder); builder.endObject(); } -} \ No newline at end of file + + @Override + protected Query doToQuery(QueryShardContext context) throws IOException { + Query luceneQuery = filter.toFilter(context); + if (luceneQuery == null) { + return null; + } + return Queries.not(luceneQuery); + } + + @Override + protected int doHashCode() { + return Objects.hash(filter); + } + + @Override + protected boolean doEquals(NotQueryBuilder other) { + return Objects.equals(filter, other.filter); + } + + @Override + protected NotQueryBuilder doReadFrom(StreamInput in) throws IOException { + QueryBuilder queryBuilder = in.readQuery(); + return new NotQueryBuilder(queryBuilder); + } + + @Override + protected void doWriteTo(StreamOutput out) throws IOException { + out.writeQuery(filter); + } + + @Override + public String getWriteableName() { + return NAME; + } +} diff --git a/core/src/main/java/org/elasticsearch/index/query/NotQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/NotQueryParser.java index 80884a84291..de458209b07 100644 --- a/core/src/main/java/org/elasticsearch/index/query/NotQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/NotQueryParser.java @@ -19,41 +19,34 @@ package org.elasticsearch.index.query; -import org.apache.lucene.search.Query; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; /** - * + * Parser for not query 
*/ -public class NotQueryParser implements QueryParser { +public class NotQueryParser implements QueryParser { - public static final String NAME = "not"; - private static final ParseField QUERY_FIELD = new ParseField("filter", "query"); - - @Inject - public NotQueryParser() { - } + private static final ParseField QUERY_FIELD = new ParseField("query", "filter"); @Override public String[] names() { - return new String[]{NAME}; + return new String[]{NotQueryBuilder.NAME}; } @Override - public Query parse(QueryParseContext parseContext) throws IOException, ParsingException { + public NotQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException { XContentParser parser = parseContext.parser(); - Query query = null; + QueryBuilder query = null; boolean queryFound = false; String queryName = null; String currentFieldName = null; + float boost = AbstractQueryBuilder.DEFAULT_BOOST; XContentParser.Token token; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { @@ -62,34 +55,36 @@ public class NotQueryParser implements QueryParser { // skip } else if (token == XContentParser.Token.START_OBJECT) { if (parseContext.parseFieldMatcher().match(currentFieldName, QUERY_FIELD)) { - query = parseContext.parseInnerFilter(); + query = parseContext.parseInnerQueryBuilder(); queryFound = true; } else { queryFound = true; // its the filter, and the name is the field - query = parseContext.parseInnerFilter(currentFieldName); + query = parseContext.parseInnerQueryBuilderByName(currentFieldName); } } else if (token.isValue()) { if ("_name".equals(currentFieldName)) { queryName = parser.text(); + } else if ("boost".equals(currentFieldName)) { + boost = parser.floatValue(); } else { - throw new ParsingException(parseContext, "[not] query does not support [" + currentFieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), "[not] query does not support [" + currentFieldName + "]"); } } } if 
(!queryFound) { - throw new ParsingException(parseContext, "filter is required when using `not` query"); + throw new ParsingException(parser.getTokenLocation(), "query is required when using `not` query"); } - if (query == null) { - return null; - } + NotQueryBuilder notQueryBuilder = new NotQueryBuilder(query); + notQueryBuilder.queryName(queryName); + notQueryBuilder.boost(boost); + return notQueryBuilder; + } - Query notQuery = Queries.not(query); - if (queryName != null) { - parseContext.addNamedQuery(queryName, notQuery); - } - return notQuery; + @Override + public NotQueryBuilder getBuilderPrototype() { + return NotQueryBuilder.PROTOTYPE; } } \ No newline at end of file diff --git a/core/src/main/java/org/elasticsearch/index/query/Operator.java b/core/src/main/java/org/elasticsearch/index/query/Operator.java new file mode 100644 index 00000000000..78f7fc8976d --- /dev/null +++ b/core/src/main/java/org/elasticsearch/index/query/Operator.java @@ -0,0 +1,83 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.index.query; + +import org.apache.lucene.queryparser.classic.QueryParser; +import org.apache.lucene.search.BooleanClause; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.util.CollectionUtils; + +import java.io.IOException; +import java.util.Locale; + +public enum Operator implements Writeable { + OR, AND; + + private static final Operator PROTOTYPE = OR; + + public BooleanClause.Occur toBooleanClauseOccur() { + switch (this) { + case OR: + return BooleanClause.Occur.SHOULD; + case AND: + return BooleanClause.Occur.MUST; + default: + throw Operator.newOperatorException(this.toString()); + } + } + + public QueryParser.Operator toQueryParserOperator() { + switch (this) { + case OR: + return QueryParser.Operator.OR; + case AND: + return QueryParser.Operator.AND; + default: + throw Operator.newOperatorException(this.toString()); + } + } + + @Override + public Operator readFrom(StreamInput in) throws IOException { + int ordinal = in.readVInt(); + if (ordinal < 0 || ordinal >= values().length) { + throw new IOException("Unknown Operator ordinal [" + ordinal + "]"); + } + return values()[ordinal]; + } + + public static Operator readOperatorFrom(StreamInput in) throws IOException { + return PROTOTYPE.readFrom(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeVInt(this.ordinal()); + } + + public static Operator fromString(String op) { + return valueOf(op.toUpperCase(Locale.ROOT)); + } + + private static IllegalArgumentException newOperatorException(String op) { + return new IllegalArgumentException("operator needs to be either " + CollectionUtils.arrayAsArrayList(values()) + ", but not [" + op + "]"); + } +} diff --git a/core/src/main/java/org/elasticsearch/index/query/PrefixQueryBuilder.java 
b/core/src/main/java/org/elasticsearch/index/query/PrefixQueryBuilder.java index e0e5b2f243f..f5ca1360268 100644 --- a/core/src/main/java/org/elasticsearch/index/query/PrefixQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/PrefixQueryBuilder.java @@ -19,44 +19,59 @@ package org.elasticsearch.index.query; +import org.apache.lucene.index.Term; +import org.apache.lucene.search.MultiTermQuery; +import org.apache.lucene.search.PrefixQuery; +import org.apache.lucene.search.Query; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.query.support.QueryParsers; import java.io.IOException; +import java.util.Objects; /** * A Query that matches documents containing terms with a specified prefix. */ -public class PrefixQueryBuilder extends MultiTermQueryBuilder implements BoostableQueryBuilder { +public class PrefixQueryBuilder extends AbstractQueryBuilder implements MultiTermQueryBuilder { - private final String name; + public static final String NAME = "prefix"; - private final String prefix; + private final String fieldName; - private float boost = -1; + private final String value; private String rewrite; - private String queryName; + static final PrefixQueryBuilder PROTOTYPE = new PrefixQueryBuilder("field", "value"); /** * A Query that matches documents containing terms with a specified prefix. 
* - * @param name The name of the field - * @param prefix The prefix query + * @param fieldName The name of the field + * @param value The prefix query */ - public PrefixQueryBuilder(String name, String prefix) { - this.name = name; - this.prefix = prefix; + public PrefixQueryBuilder(String fieldName, String value) { + if (Strings.isEmpty(fieldName)) { + throw new IllegalArgumentException("field name is null or empty"); + } + if (value == null) { + throw new IllegalArgumentException("value cannot be null."); + } + this.fieldName = fieldName; + this.value = value; } - /** - * Sets the boost for this query. Documents matching this query will (in addition to the normal - * weightings) have their score multiplied by the boost provided. - */ - @Override - public PrefixQueryBuilder boost(float boost) { - this.boost = boost; - return this; + public String fieldName() { + return this.fieldName; + } + + public String value() { + return this.value; } public PrefixQueryBuilder rewrite(String rewrite) { @@ -64,33 +79,71 @@ public class PrefixQueryBuilder extends MultiTermQueryBuilder implements Boostab return this; } - /** - * Sets the query name for the filter that can be used when searching for matched_filters per hit. 
- */ - public PrefixQueryBuilder queryName(String queryName) { - this.queryName = queryName; - return this; + public String rewrite() { + return this.rewrite; } @Override public void doXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(PrefixQueryParser.NAME); - if (boost == -1 && rewrite == null && queryName == null) { - builder.field(name, prefix); - } else { - builder.startObject(name); - builder.field("prefix", prefix); - if (boost != -1) { - builder.field("boost", boost); - } - if (rewrite != null) { - builder.field("rewrite", rewrite); - } - if (queryName != null) { - builder.field("_name", queryName); - } - builder.endObject(); + builder.startObject(NAME); + builder.startObject(fieldName); + builder.field("prefix", this.value); + if (rewrite != null) { + builder.field("rewrite", rewrite); } + printBoostAndQueryName(builder); + builder.endObject(); builder.endObject(); } -} \ No newline at end of file + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + protected Query doToQuery(QueryShardContext context) throws IOException { + MultiTermQuery.RewriteMethod method = QueryParsers.parseRewriteMethod(context.parseFieldMatcher(), rewrite, null); + + Query query = null; + MappedFieldType fieldType = context.fieldMapper(fieldName); + if (fieldType != null) { + query = fieldType.prefixQuery(value, method, context); + } + if (query == null) { + PrefixQuery prefixQuery = new PrefixQuery(new Term(fieldName, BytesRefs.toBytesRef(value))); + if (method != null) { + prefixQuery.setRewriteMethod(method); + } + query = prefixQuery; + } + + return query; + } + + @Override + protected PrefixQueryBuilder doReadFrom(StreamInput in) throws IOException { + PrefixQueryBuilder prefixQueryBuilder = new PrefixQueryBuilder(in.readString(), in.readString()); + prefixQueryBuilder.rewrite = in.readOptionalString(); + return prefixQueryBuilder; + } + + @Override + protected void doWriteTo(StreamOutput out) throws 
IOException { + out.writeString(fieldName); + out.writeString(value); + out.writeOptionalString(rewrite); + } + + @Override + protected final int doHashCode() { + return Objects.hash(fieldName, value, rewrite); + } + + @Override + protected boolean doEquals(PrefixQueryBuilder other) { + return Objects.equals(fieldName, other.fieldName) && + Objects.equals(value, other.value) && + Objects.equals(rewrite, other.rewrite); + } +} diff --git a/core/src/main/java/org/elasticsearch/index/query/PrefixQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/PrefixQueryParser.java index c99f268dadc..a8dca4c7816 100644 --- a/core/src/main/java/org/elasticsearch/index/query/PrefixQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/PrefixQueryParser.java @@ -19,48 +19,34 @@ package org.elasticsearch.index.query; -import org.apache.lucene.index.Term; -import org.apache.lucene.search.MultiTermQuery; -import org.apache.lucene.search.PrefixQuery; -import org.apache.lucene.search.Query; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.query.support.QueryParsers; import java.io.IOException; /** - * + * Parser for prefix query */ -public class PrefixQueryParser implements QueryParser { - - public static final String NAME = "prefix"; +public class PrefixQueryParser implements QueryParser { private static final ParseField NAME_FIELD = new ParseField("_name").withAllDeprecated("query name is not supported in short version of prefix query"); - @Inject - public PrefixQueryParser() { - } - @Override public String[] names() { - return new String[]{NAME}; + return new String[]{PrefixQueryBuilder.NAME}; } @Override - public Query parse(QueryParseContext parseContext) throws 
IOException, ParsingException { + public PrefixQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException { XContentParser parser = parseContext.parser(); String fieldName = parser.currentName(); - String rewriteMethod = null; - String queryName = null; - String value = null; - float boost = 1.0f; + String rewrite = null; + + String queryName = null; + float boost = AbstractQueryBuilder.DEFAULT_BOOST; String currentFieldName = null; XContentParser.Token token; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { @@ -81,9 +67,9 @@ public class PrefixQueryParser implements QueryParser { } else if ("boost".equals(currentFieldName)) { boost = parser.floatValue(); } else if ("rewrite".equals(currentFieldName)) { - rewriteMethod = parser.textOrNull(); + rewrite = parser.textOrNull(); } else { - throw new ParsingException(parseContext, "[regexp] query does not support [" + currentFieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), "[regexp] query does not support [" + currentFieldName + "]"); } } } @@ -98,27 +84,16 @@ public class PrefixQueryParser implements QueryParser { } if (value == null) { - throw new ParsingException(parseContext, "No value specified for prefix query"); + throw new ParsingException(parser.getTokenLocation(), "No value specified for prefix query"); } - - MultiTermQuery.RewriteMethod method = QueryParsers.parseRewriteMethod(parseContext.parseFieldMatcher(), rewriteMethod, null); - - Query query = null; - MappedFieldType fieldType = parseContext.fieldMapper(fieldName); - if (fieldType != null) { - query = fieldType.prefixQuery(value, method, parseContext); - } - if (query == null) { - PrefixQuery prefixQuery = new PrefixQuery(new Term(fieldName, BytesRefs.toBytesRef(value))); - if (method != null) { - prefixQuery.setRewriteMethod(method); - } - query = prefixQuery; - } - query.setBoost(boost); - if (queryName != null) { - parseContext.addNamedQuery(queryName, query); - } - return query; + 
return new PrefixQueryBuilder(fieldName, value) + .rewrite(rewrite) + .boost(boost) + .queryName(queryName); } -} \ No newline at end of file + + @Override + public PrefixQueryBuilder getBuilderPrototype() { + return PrefixQueryBuilder.PROTOTYPE; + } +} diff --git a/core/src/main/java/org/elasticsearch/index/query/QueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/QueryBuilder.java index fa11d3277fd..2fde316a561 100644 --- a/core/src/main/java/org/elasticsearch/index/query/QueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/QueryBuilder.java @@ -19,25 +19,57 @@ package org.elasticsearch.index.query; -import org.elasticsearch.action.support.ToXContentToBytes; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentType; +import org.apache.lucene.search.Query; +import org.elasticsearch.common.io.stream.NamedWriteable; +import org.elasticsearch.common.xcontent.ToXContent; import java.io.IOException; -public abstract class QueryBuilder extends ToXContentToBytes { +public interface QueryBuilder extends NamedWriteable, ToXContent { - protected QueryBuilder() { - super(XContentType.JSON); - } + /** + * Converts this QueryBuilder to a lucene {@link Query}. + * Returns null if this query should be ignored in the context of + * parent queries. + * + * @param context additional information needed to construct the queries + * @return the {@link Query} or null if this query should be ignored upstream + */ + Query toQuery(QueryShardContext context) throws IOException; - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - doXContent(builder, params); - builder.endObject(); - return builder; - } + /** + * Converts this QueryBuilder to an unscored lucene {@link Query} that acts as a filter. + * Returns null if this query should be ignored in the context of + * parent queries. 
+ * + * @param context additional information needed to construct the queries + * @return the {@link Query} or null if this query should be ignored upstream + */ + Query toFilter(QueryShardContext context) throws IOException; - protected abstract void doXContent(XContentBuilder builder, Params params) throws IOException; + /** + * Sets the arbitrary name to be assigned to the query (see named queries). + */ + QB queryName(String queryName); + + /** + * Returns the arbitrary name assigned to the query (see named queries). + */ + String queryName(); + + /** + * Returns the boost for this query. + */ + float boost(); + + /** + * Sets the boost for this query. Documents matching this query will (in addition to the normal + * weightings) have their score multiplied by the boost provided. + */ + QB boost(float boost); + + /** + * Returns the name that identifies uniquely the query + */ + String getName(); } diff --git a/core/src/main/java/org/elasticsearch/index/query/QueryBuilders.java b/core/src/main/java/org/elasticsearch/index/query/QueryBuilders.java index f042056b273..df823e166f7 100644 --- a/core/src/main/java/org/elasticsearch/index/query/QueryBuilders.java +++ b/core/src/main/java/org/elasticsearch/index/query/QueryBuilders.java @@ -24,13 +24,18 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.ShapeRelation; import org.elasticsearch.common.geo.builders.ShapeBuilder; +import org.elasticsearch.index.query.MoreLikeThisQueryBuilder.Item; import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder; import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder; +import org.elasticsearch.index.search.MatchQuery; +import org.elasticsearch.indices.cache.query.terms.TermsLookup; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.Template; +import java.io.IOException; import 
java.util.Collection; +import java.util.List; import java.util.Map; /** @@ -39,7 +44,7 @@ import java.util.Map; public abstract class QueryBuilders { /** - * A query that match on all documents. + * A query that matches on all documents. */ public static MatchAllQueryBuilder matchAllQuery() { return new MatchAllQueryBuilder(); @@ -52,17 +57,17 @@ public abstract class QueryBuilders { * @param text The query text (to be analyzed). */ public static MatchQueryBuilder matchQuery(String name, Object text) { - return new MatchQueryBuilder(name, text).type(MatchQueryBuilder.Type.BOOLEAN); + return new MatchQueryBuilder(name, text).type(MatchQuery.Type.BOOLEAN); } /** * Creates a common query for the provided field name and text. * - * @param name The field name. + * @param fieldName The field name. * @param text The query text (to be analyzed). */ - public static CommonTermsQueryBuilder commonTermsQuery(String name, Object text) { - return new CommonTermsQueryBuilder(name, text); + public static CommonTermsQueryBuilder commonTermsQuery(String fieldName, Object text) { + return new CommonTermsQueryBuilder(fieldName, text); } /** @@ -82,7 +87,7 @@ public abstract class QueryBuilders { * @param text The query text (to be analyzed). */ public static MatchQueryBuilder matchPhraseQuery(String name, Object text) { - return new MatchQueryBuilder(name, text).type(MatchQueryBuilder.Type.PHRASE); + return new MatchQueryBuilder(name, text).type(MatchQuery.Type.PHRASE); } /** @@ -92,7 +97,7 @@ public abstract class QueryBuilders { * @param text The query text (to be analyzed). 
*/ public static MatchQueryBuilder matchPhrasePrefixQuery(String name, Object text) { - return new MatchQueryBuilder(name, text).type(MatchQueryBuilder.Type.PHRASE_PREFIX); + return new MatchQueryBuilder(name, text).type(MatchQuery.Type.PHRASE_PREFIX); } /** @@ -276,8 +281,8 @@ public abstract class QueryBuilders { * Unlike the "NOT" clause, this still selects documents that contain undesirable terms, * but reduces their overall score: */ - public static BoostingQueryBuilder boostingQuery() { - return new BoostingQueryBuilder(); + public static BoostingQueryBuilder boostingQuery(QueryBuilder positiveQuery, QueryBuilder negativeQuery) { + return new BoostingQueryBuilder(positiveQuery, negativeQuery); } /** @@ -311,26 +316,33 @@ public abstract class QueryBuilders { return new SpanFirstQueryBuilder(match, end); } - public static SpanNearQueryBuilder spanNearQuery() { - return new SpanNearQueryBuilder(); + public static SpanNearQueryBuilder spanNearQuery(SpanQueryBuilder initialClause, int slop) { + return new SpanNearQueryBuilder(initialClause, slop); } - public static SpanNotQueryBuilder spanNotQuery() { - return new SpanNotQueryBuilder(); + public static SpanNotQueryBuilder spanNotQuery(SpanQueryBuilder include, SpanQueryBuilder exclude) { + return new SpanNotQueryBuilder(include, exclude); } - public static SpanOrQueryBuilder spanOrQuery() { - return new SpanOrQueryBuilder(); + public static SpanOrQueryBuilder spanOrQuery(SpanQueryBuilder initialClause) { + return new SpanOrQueryBuilder(initialClause); } - /** Creates a new {@code span_within} builder. */ - public static SpanWithinQueryBuilder spanWithinQuery() { - return new SpanWithinQueryBuilder(); + /** Creates a new {@code span_within} builder. + * @param big the big clause, it must enclose {@code little} for a match. + * @param little the little clause, it must be contained within {@code big} for a match. 
+ */ + public static SpanWithinQueryBuilder spanWithinQuery(SpanQueryBuilder big, SpanQueryBuilder little) { + return new SpanWithinQueryBuilder(big, little); } - /** Creates a new {@code span_containing} builder. */ - public static SpanContainingQueryBuilder spanContainingQuery() { - return new SpanContainingQueryBuilder(); + /** + * Creates a new {@code span_containing} builder. + * @param big the big clause, it must enclose {@code little} for a match. + * @param little the little clause, it must be contained within {@code big} for a match. + */ + public static SpanContainingQueryBuilder spanContainingQuery(SpanQueryBuilder big, SpanQueryBuilder little) { + return new SpanContainingQueryBuilder(big, little); } /** @@ -361,19 +373,34 @@ public abstract class QueryBuilders { } /** - * A query that allows to define a custom scoring function. + * A function_score query with no functions. * * @param queryBuilder The query to custom score + * @return the function score query */ public static FunctionScoreQueryBuilder functionScoreQuery(QueryBuilder queryBuilder) { return new FunctionScoreQueryBuilder(queryBuilder); } /** - * A query that allows to define a custom scoring function. 
+ * A query that allows to define a custom scoring function + * + * @param queryBuilder The query to custom score + * @param filterFunctionBuilders the filters and functions to execute + * @return the function score query */ - public static FunctionScoreQueryBuilder functionScoreQuery() { - return new FunctionScoreQueryBuilder(); + public static FunctionScoreQueryBuilder functionScoreQuery(QueryBuilder queryBuilder, FunctionScoreQueryBuilder.FilterFunctionBuilder[] filterFunctionBuilders) { + return new FunctionScoreQueryBuilder(queryBuilder, filterFunctionBuilders); + } + + /** + * A query that allows to define a custom scoring function + * + * @param filterFunctionBuilders the filters and functions to execute + * @return the function score query + */ + public static FunctionScoreQueryBuilder functionScoreQuery(FunctionScoreQueryBuilder.FilterFunctionBuilder[] filterFunctionBuilders) { + return new FunctionScoreQueryBuilder(filterFunctionBuilders); } /** @@ -392,25 +419,47 @@ public abstract class QueryBuilders { * @param function The function builder used to custom score */ public static FunctionScoreQueryBuilder functionScoreQuery(QueryBuilder queryBuilder, ScoreFunctionBuilder function) { - return (new FunctionScoreQueryBuilder(queryBuilder)).add(function); + return (new FunctionScoreQueryBuilder(queryBuilder, function)); } /** - * A more like this query that finds documents that are "like" the provided {@link MoreLikeThisQueryBuilder#likeText(String)} + * A more like this query that finds documents that are "like" the provided texts or documents * which is checked against the fields the query is constructed with. * - * @param fields The fields to run the query against + * @param fields the field names that will be used when generating the 'More Like This' query. + * @param likeTexts the text to use when generating the 'More Like This' query. + * @param likeItems the documents to use when generating the 'More Like This' query. 
*/ - public static MoreLikeThisQueryBuilder moreLikeThisQuery(String... fields) { - return new MoreLikeThisQueryBuilder(fields); + public static MoreLikeThisQueryBuilder moreLikeThisQuery(String[] fields, String[] likeTexts, Item[] likeItems) { + return new MoreLikeThisQueryBuilder(fields, likeTexts, likeItems); } /** - * A more like this query that finds documents that are "like" the provided {@link MoreLikeThisQueryBuilder#likeText(String)} + * A more like this query that finds documents that are "like" the provided texts or documents * which is checked against the "_all" field. + * @param likeTexts the text to use when generating the 'More Like This' query. + * @param likeItems the documents to use when generating the 'More Like This' query. */ - public static MoreLikeThisQueryBuilder moreLikeThisQuery() { - return new MoreLikeThisQueryBuilder(); + public static MoreLikeThisQueryBuilder moreLikeThisQuery(String[] likeTexts, Item[] likeItems) { + return moreLikeThisQuery(null, likeTexts, likeItems); + } + + /** + * A more like this query that finds documents that are "like" the provided texts + * which is checked against the "_all" field. + * @param likeTexts the text to use when generating the 'More Like This' query. + */ + public static MoreLikeThisQueryBuilder moreLikeThisQuery(String[] likeTexts) { + return moreLikeThisQuery(null, likeTexts, null); + } + + /** + * A more like this query that finds documents that are "like" the provided documents + * which is checked against the "_all" field. + * @param likeItems the documents to use when generating the 'More Like This' query. + */ + public static MoreLikeThisQueryBuilder moreLikeThisQuery(Item[] likeItems) { + return moreLikeThisQuery(null, null, likeItems); } /** @@ -534,19 +583,8 @@ public abstract class QueryBuilders { /** * A Query builder which allows building a query thanks to a JSON string or binary data. 
*/ - public static WrapperQueryBuilder wrapperQuery(byte[] source, int offset, int length) { - return new WrapperQueryBuilder(source, offset, length); - } - - /** - * Query that matches Documents based on the relationship between the given shape and - * indexed shapes - * - * @param name The shape field name - * @param shape Shape to use in the Query - */ - public static GeoShapeQueryBuilder geoShapeQuery(String name, ShapeBuilder shape) { - return new GeoShapeQueryBuilder(name, shape); + public static WrapperQueryBuilder wrapperQuery(byte[] source) { + return new WrapperQueryBuilder(source); } /** @@ -578,11 +616,10 @@ public abstract class QueryBuilders { } /** - * A terms lookup filter for the provided field name. A lookup terms filter can - * extract the terms to filter by from another doc in an index. + * A terms query that can extract the terms from another doc in an index. */ - public static TermsLookupQueryBuilder termsLookupQuery(String name) { - return new TermsLookupQueryBuilder(name); + public static TermsQueryBuilder termsLookupQuery(String name, TermsLookup termsLookup) { + return new TermsQueryBuilder(name, termsLookup); } /** @@ -608,9 +645,31 @@ public abstract class QueryBuilders { * A filter to filter based on a specific range from a specific geo location / point. * * @param name The location field name. + * @param point The point */ - public static GeoDistanceRangeQueryBuilder geoDistanceRangeQuery(String name) { - return new GeoDistanceRangeQueryBuilder(name); + public static GeoDistanceRangeQueryBuilder geoDistanceRangeQuery(String name, GeoPoint point) { + return new GeoDistanceRangeQueryBuilder(name, point); + } + + /** + * A filter to filter based on a specific range from a specific geo location / point. + * + * @param name The location field name. 
+ * @param geohash The point as geohash + */ + public static GeoDistanceRangeQueryBuilder geoDistanceRangeQuery(String name, String geohash) { + return new GeoDistanceRangeQueryBuilder(name, geohash); + } + + /** + * A filter to filter based on a specific range from a specific geo location / point. + * + * @param name The location field name. + * @param lat The points latitude + * @param lon The points longitude + */ + public static GeoDistanceRangeQueryBuilder geoDistanceRangeQuery(String name, double lat, double lon) { + return new GeoDistanceRangeQueryBuilder(name, lat, lon); } /** @@ -622,17 +681,6 @@ public abstract class QueryBuilders { return new GeoBoundingBoxQueryBuilder(name); } - /** - * A filter based on a bounding box defined by geohash. The field this filter is applied to - * must have {"type":"geo_point", "geohash":true} - * to work. - * - * @param name The geo point field name. - */ - public static GeohashCellQuery.Builder geoHashCellQuery(String name) { - return new GeohashCellQuery.Builder(name); - } - /** * A filter based on a bounding box defined by geohash. The field this filter is applied to * must have {"type":"geo_point", "geohash":true} @@ -669,14 +717,14 @@ public abstract class QueryBuilders { public static GeohashCellQuery.Builder geoHashCellQuery(String name, String geohash, boolean neighbors) { return new GeohashCellQuery.Builder(name, geohash, neighbors); } - + /** * A filter to filter based on a polygon defined by a set of locations / points. * * @param name The location field name. 
*/ - public static GeoPolygonQueryBuilder geoPolygonQuery(String name) { - return new GeoPolygonQueryBuilder(name); + public static GeoPolygonQueryBuilder geoPolygonQuery(String name, List points) { + return new GeoPolygonQueryBuilder(name, points); } /** @@ -684,18 +732,13 @@ public abstract class QueryBuilders { * * @param name The shape field name * @param shape Shape to use in the filter - * @param relation relation of the shapes */ - public static GeoShapeQueryBuilder geoShapeQuery(String name, ShapeBuilder shape, ShapeRelation relation) { - return new GeoShapeQueryBuilder(name, shape, relation); - } - - public static GeoShapeQueryBuilder geoShapeQuery(String name, String indexedShapeId, String indexedShapeType, ShapeRelation relation) { - return new GeoShapeQueryBuilder(name, indexedShapeId, indexedShapeType, relation); + public static GeoShapeQueryBuilder geoShapeQuery(String name, ShapeBuilder shape) throws IOException { + return new GeoShapeQueryBuilder(name, shape); } public static GeoShapeQueryBuilder geoShapeQuery(String name, String indexedShapeId, String indexedShapeType) { - return geoShapeQuery(name, indexedShapeId, indexedShapeType, null); + return new GeoShapeQueryBuilder(name, indexedShapeId, indexedShapeType); } /** @@ -704,12 +747,16 @@ public abstract class QueryBuilders { * @param name The shape field name * @param shape Shape to use in the filter */ - public static GeoShapeQueryBuilder geoIntersectionQuery(String name, ShapeBuilder shape) { - return geoShapeQuery(name, shape, ShapeRelation.INTERSECTS); + public static GeoShapeQueryBuilder geoIntersectionQuery(String name, ShapeBuilder shape) throws IOException { + GeoShapeQueryBuilder builder = geoShapeQuery(name, shape); + builder.relation(ShapeRelation.INTERSECTS); + return builder; } public static GeoShapeQueryBuilder geoIntersectionQuery(String name, String indexedShapeId, String indexedShapeType) { - return geoShapeQuery(name, indexedShapeId, indexedShapeType, ShapeRelation.INTERSECTS); 
+ GeoShapeQueryBuilder builder = geoShapeQuery(name, indexedShapeId, indexedShapeType); + builder.relation(ShapeRelation.INTERSECTS); + return builder; } /** @@ -718,12 +765,16 @@ public abstract class QueryBuilders { * @param name The shape field name * @param shape Shape to use in the filter */ - public static GeoShapeQueryBuilder geoWithinQuery(String name, ShapeBuilder shape) { - return geoShapeQuery(name, shape, ShapeRelation.WITHIN); + public static GeoShapeQueryBuilder geoWithinQuery(String name, ShapeBuilder shape) throws IOException { + GeoShapeQueryBuilder builder = geoShapeQuery(name, shape); + builder.relation(ShapeRelation.WITHIN); + return builder; } public static GeoShapeQueryBuilder geoWithinQuery(String name, String indexedShapeId, String indexedShapeType) { - return geoShapeQuery(name, indexedShapeId, indexedShapeType, ShapeRelation.WITHIN); + GeoShapeQueryBuilder builder = geoShapeQuery(name, indexedShapeId, indexedShapeType); + builder.relation(ShapeRelation.WITHIN); + return builder; } /** @@ -732,12 +783,16 @@ public abstract class QueryBuilders { * @param name The shape field name * @param shape Shape to use in the filter */ - public static GeoShapeQueryBuilder geoDisjointQuery(String name, ShapeBuilder shape) { - return geoShapeQuery(name, shape, ShapeRelation.DISJOINT); + public static GeoShapeQueryBuilder geoDisjointQuery(String name, ShapeBuilder shape) throws IOException { + GeoShapeQueryBuilder builder = geoShapeQuery(name, shape); + builder.relation(ShapeRelation.DISJOINT); + return builder; } public static GeoShapeQueryBuilder geoDisjointQuery(String name, String indexedShapeId, String indexedShapeType) { - return geoShapeQuery(name, indexedShapeId, indexedShapeType, ShapeRelation.DISJOINT); + GeoShapeQueryBuilder builder = geoShapeQuery(name, indexedShapeId, indexedShapeType); + builder.relation(ShapeRelation.DISJOINT); + return builder; } /** @@ -751,11 +806,23 @@ public abstract class QueryBuilders { /** * A filter to filter only 
documents where a field does not exists in them. - * - * @param name The name of the field + * @param name the field to query */ public static MissingQueryBuilder missingQuery(String name) { - return new MissingQueryBuilder(name); + return missingQuery(name, MissingQueryBuilder.DEFAULT_NULL_VALUE, MissingQueryBuilder.DEFAULT_EXISTENCE_VALUE); + } + + /** + * A filter to filter only documents where a field does not exists in them. + * @param name the field to query + * @param nullValue should the missing filter automatically include fields with null value configured in the + * mappings. Defaults to false. + * @param existence should the missing filter include documents where the field doesn't exist in the docs. + * Defaults to true. + * @throws IllegalArgumentException when both existence and nullValue are set to false + */ + public static MissingQueryBuilder missingQuery(String name, boolean nullValue, boolean existence) { + return new MissingQueryBuilder(name, nullValue, existence); } public static NotQueryBuilder notQuery(QueryBuilder filter) { diff --git a/core/src/main/java/org/elasticsearch/index/query/QueryFilterBuilder.java b/core/src/main/java/org/elasticsearch/index/query/QueryFilterBuilder.java index 1e03ef11ecc..4ca9e1598e2 100644 --- a/core/src/main/java/org/elasticsearch/index/query/QueryFilterBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/QueryFilterBuilder.java @@ -19,9 +19,14 @@ package org.elasticsearch.index.query; +import org.apache.lucene.search.ConstantScoreQuery; +import org.apache.lucene.search.Query; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import java.io.IOException; +import java.util.Objects; /** * A filter that simply wraps a query. 
@@ -30,22 +35,77 @@ import java.io.IOException; */ //TODO: remove when https://github.com/elastic/elasticsearch/issues/13326 is fixed @Deprecated -public class QueryFilterBuilder extends QueryBuilder { +public class QueryFilterBuilder extends AbstractQueryBuilder { + + public static final String NAME = "query"; private final QueryBuilder queryBuilder; + static final QueryFilterBuilder PROTOTYPE = new QueryFilterBuilder(EmptyQueryBuilder.PROTOTYPE); + /** * A filter that simply wraps a query. * * @param queryBuilder The query to wrap as a filter */ public QueryFilterBuilder(QueryBuilder queryBuilder) { + if (queryBuilder == null) { + throw new IllegalArgumentException("inner query cannot be null"); + } this.queryBuilder = queryBuilder; } + /** + * @return the query builder that is wrapped by this {@link QueryFilterBuilder} + */ + public QueryBuilder innerQuery() { + return this.queryBuilder; + } + @Override protected void doXContent(XContentBuilder builder, Params params) throws IOException { - builder.field(QueryFilterParser.NAME); + builder.field(NAME); queryBuilder.toXContent(builder, params); } + + @Override + protected Query doToQuery(QueryShardContext context) throws IOException { + // inner query builder can potentially be `null`, in that case we ignore it + Query innerQuery = this.queryBuilder.toQuery(context); + if (innerQuery == null) { + return null; + } + return new ConstantScoreQuery(innerQuery); + } + + @Override + protected void setFinalBoost(Query query) { + //no-op this query doesn't support boost + } + + @Override + protected int doHashCode() { + return Objects.hash(queryBuilder); + } + + @Override + protected boolean doEquals(QueryFilterBuilder other) { + return Objects.equals(queryBuilder, other.queryBuilder); + } + + @Override + protected QueryFilterBuilder doReadFrom(StreamInput in) throws IOException { + QueryBuilder innerQueryBuilder = in.readQuery(); + return new QueryFilterBuilder(innerQueryBuilder); + } + + @Override + protected void 
doWriteTo(StreamOutput out) throws IOException { + out.writeQuery(queryBuilder); + } + + @Override + public String getWriteableName() { + return NAME; + } } diff --git a/core/src/main/java/org/elasticsearch/index/query/QueryFilterParser.java b/core/src/main/java/org/elasticsearch/index/query/QueryFilterParser.java index 4d67eaf8859..e13661c814c 100644 --- a/core/src/main/java/org/elasticsearch/index/query/QueryFilterParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/QueryFilterParser.java @@ -19,29 +19,28 @@ package org.elasticsearch.index.query; -import org.apache.lucene.search.Query; -import org.elasticsearch.common.ParsingException; -import org.elasticsearch.common.inject.Inject; - import java.io.IOException; +/** + * Parser for query filter + * @deprecated use any query instead directly, possible since queries and filters are merged. + */ // TODO: remove when https://github.com/elastic/elasticsearch/issues/13326 is fixed @Deprecated -public class QueryFilterParser implements QueryParser { - - public static final String NAME = "query"; - - @Inject - public QueryFilterParser() { - } +public class QueryFilterParser implements QueryParser { @Override public String[] names() { - return new String[]{NAME}; + return new String[]{QueryFilterBuilder.NAME}; } @Override - public Query parse(QueryParseContext parseContext) throws IOException, ParsingException { - return parseContext.parseInnerQuery(); + public QueryFilterBuilder fromXContent(QueryParseContext parseContext) throws IOException { + return new QueryFilterBuilder(parseContext.parseInnerQueryBuilder()); + } + + @Override + public QueryFilterBuilder getBuilderPrototype() { + return QueryFilterBuilder.PROTOTYPE; } } \ No newline at end of file diff --git a/core/src/main/java/org/elasticsearch/index/query/QueryParseContext.java b/core/src/main/java/org/elasticsearch/index/query/QueryParseContext.java index 62371249423..de85abd347a 100644 --- 
a/core/src/main/java/org/elasticsearch/index/query/QueryParseContext.java +++ b/core/src/main/java/org/elasticsearch/index/query/QueryParseContext.java @@ -19,96 +19,38 @@ package org.elasticsearch.index.query; -import com.google.common.collect.ImmutableMap; -import org.apache.lucene.analysis.Analyzer; -import org.apache.lucene.queryparser.classic.MapperQueryParser; -import org.apache.lucene.queryparser.classic.QueryParserSettings; -import org.apache.lucene.search.Filter; -import org.apache.lucene.search.Query; -import org.apache.lucene.search.join.BitSetProducer; -import org.apache.lucene.search.similarities.Similarity; -import org.elasticsearch.Version; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.ParsingException; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.Index; -import org.elasticsearch.index.analysis.AnalysisService; -import org.elasticsearch.index.fielddata.IndexFieldData; -import org.elasticsearch.index.mapper.ContentPath; -import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.mapper.Mapper; -import org.elasticsearch.index.mapper.MapperBuilders; -import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.mapper.core.StringFieldMapper; -import org.elasticsearch.index.mapper.object.ObjectMapper; -import org.elasticsearch.index.query.support.NestedScope; -import org.elasticsearch.index.similarity.SimilarityService; -import org.elasticsearch.script.ScriptService; -import org.elasticsearch.search.fetch.innerhits.InnerHitsContext; -import org.elasticsearch.search.internal.SearchContext; -import org.elasticsearch.search.lookup.SearchLookup; +import org.elasticsearch.indices.query.IndicesQueriesRegistry; import java.io.IOException; 
-import java.util.Arrays; -import java.util.Collection; -import java.util.HashMap; -import java.util.Map; public class QueryParseContext { private static final ParseField CACHE = new ParseField("_cache").withAllDeprecated("Elasticsearch makes its own caching decisions"); private static final ParseField CACHE_KEY = new ParseField("_cache_key").withAllDeprecated("Filters are always used as cache keys"); - private static ThreadLocal typesContext = new ThreadLocal<>(); - - public static void setTypes(String[] types) { - typesContext.set(types); - } - - public static String[] getTypes() { - return typesContext.get(); - } - - public static String[] setTypesWithPrevious(String[] types) { - String[] old = typesContext.get(); - setTypes(types); - return old; - } - - public static void removeTypes() { - typesContext.remove(); - } - - private final Index index; - - private final Version indexVersionCreated; - - private final IndexQueryParserService indexQueryParser; - - private final Map namedQueries = new HashMap<>(); - - private final MapperQueryParser queryParser = new MapperQueryParser(this); - private XContentParser parser; - private ParseFieldMatcher parseFieldMatcher = ParseFieldMatcher.EMPTY; - private boolean allowUnmappedFields; + private IndicesQueriesRegistry indicesQueriesRegistry; - private boolean mapUnmappedFieldAsString; + public QueryParseContext(IndicesQueriesRegistry registry) { + this.indicesQueriesRegistry = registry; + } - private NestedScope nestedScope; + public void reset(XContentParser jp) { + this.parseFieldMatcher = ParseFieldMatcher.EMPTY; + this.parser = jp; + if (parser != null) { + this.parser.setParseFieldMatcher(parseFieldMatcher); + } + } - private boolean isFilter; - - public QueryParseContext(Index index, IndexQueryParserService indexQueryParser) { - this.index = index; - this.indexVersionCreated = Version.indexCreated(indexQueryParser.indexSettings()); - this.indexQueryParser = indexQueryParser; + public XContentParser parser() { + 
return this.parser; } public void parseFieldMatcher(ParseFieldMatcher parseFieldMatcher) { @@ -118,148 +60,38 @@ public class QueryParseContext { this.parseFieldMatcher = parseFieldMatcher; } - public ParseFieldMatcher parseFieldMatcher() { - return parseFieldMatcher; - } - - public void reset(XContentParser jp) { - allowUnmappedFields = indexQueryParser.defaultAllowUnmappedFields(); - this.parseFieldMatcher = ParseFieldMatcher.EMPTY; - this.lookup = null; - this.parser = jp; - if (parser != null) { - this.parser.setParseFieldMatcher(parseFieldMatcher); - } - this.namedQueries.clear(); - this.nestedScope = new NestedScope(); - this.isFilter = false; - } - - public Index index() { - return this.index; - } - - public void parser(XContentParser parser) { - this.parser = parser; - } - - public XContentParser parser() { - return parser; - } - - public IndexQueryParserService indexQueryParserService() { - return indexQueryParser; - } - - public AnalysisService analysisService() { - return indexQueryParser.analysisService; - } - - public ScriptService scriptService() { - return indexQueryParser.scriptService; - } - - public MapperService mapperService() { - return indexQueryParser.mapperService; - } - - @Nullable - public SimilarityService similarityService() { - return indexQueryParser.similarityService; - } - - public Similarity searchSimilarity() { - return indexQueryParser.similarityService != null ? 
indexQueryParser.similarityService.similarity() : null; - } - - public String defaultField() { - return indexQueryParser.defaultField(); - } - - public boolean queryStringLenient() { - return indexQueryParser.queryStringLenient(); - } - - public MapperQueryParser queryParser(QueryParserSettings settings) { - queryParser.reset(settings); - return queryParser; - } - - public BitSetProducer bitsetFilter(Filter filter) { - return indexQueryParser.bitsetFilterCache.getBitSetProducer(filter); - } - - public > IFD getForField(MappedFieldType mapper) { - return indexQueryParser.fieldDataService.getForField(mapper); - } - - public void addNamedQuery(String name, Query query) { - if (query != null) { - namedQueries.put(name, query); - } - } - - public ImmutableMap copyNamedQueries() { - return ImmutableMap.copyOf(namedQueries); - } - - public void combineNamedQueries(QueryParseContext context) { - namedQueries.putAll(context.namedQueries); + public boolean isDeprecatedSetting(String setting) { + return parseFieldMatcher.match(setting, CACHE) || parseFieldMatcher.match(setting, CACHE_KEY); } /** - * Return whether we are currently parsing a filter or a query. 
+ * @return a new QueryBuilder based on the current state of the parser */ - public boolean isFilter() { - return isFilter; - } - - public void addInnerHits(String name, InnerHitsContext.BaseInnerHits context) { - SearchContext sc = SearchContext.current(); - if (sc == null) { - throw new ParsingException(this, "inner_hits unsupported"); - } - - InnerHitsContext innerHitsContext; - if (sc.innerHits() == null) { - innerHitsContext = new InnerHitsContext(new HashMap()); - sc.innerHits(innerHitsContext); - } else { - innerHitsContext = sc.innerHits(); - } - innerHitsContext.addInnerHitDefinition(name, context); - } - - @Nullable - public Query parseInnerQuery() throws ParsingException, IOException { + public QueryBuilder parseInnerQueryBuilder() throws IOException { // move to START object XContentParser.Token token; if (parser.currentToken() != XContentParser.Token.START_OBJECT) { token = parser.nextToken(); if (token != XContentParser.Token.START_OBJECT) { - throw new ParsingException(this, "[_na] query malformed, must start with start_object"); + throw new ParsingException(parser.getTokenLocation(), "[_na] query malformed, must start with start_object"); } } token = parser.nextToken(); if (token == XContentParser.Token.END_OBJECT) { // empty query - return null; + return EmptyQueryBuilder.PROTOTYPE; } if (token != XContentParser.Token.FIELD_NAME) { - throw new ParsingException(this, "[_na] query malformed, no field after start_object"); + throw new ParsingException(parser.getTokenLocation(), "[_na] query malformed, no field after start_object"); } String queryName = parser.currentName(); // move to the next START_OBJECT token = parser.nextToken(); if (token != XContentParser.Token.START_OBJECT && token != XContentParser.Token.START_ARRAY) { - throw new ParsingException(this, "[_na] query malformed, no field after start_object"); + throw new ParsingException(parser.getTokenLocation(), "[_na] query malformed, no field after start_object"); } - QueryParser queryParser 
= indexQueryParser.queryParser(queryName); - if (queryParser == null) { - throw new ParsingException(this, "No query registered for [" + queryName + "]"); - } - Query result = queryParser.parse(this); + QueryBuilder result = parseInnerQueryBuilderByName(queryName); if (parser.currentToken() == XContentParser.Token.END_OBJECT || parser.currentToken() == XContentParser.Token.END_ARRAY) { // if we are at END_OBJECT, move to the next one... parser.nextToken(); @@ -267,137 +99,28 @@ public class QueryParseContext { return result; } - @Nullable - public Query parseInnerFilter() throws ParsingException, IOException { - final boolean originalIsFilter = isFilter; - try { - isFilter = true; - return parseInnerQuery(); - } finally { - isFilter = originalIsFilter; + public QueryBuilder parseInnerQueryBuilderByName(String queryName) throws IOException { + QueryParser queryParser = queryParser(queryName); + if (queryParser == null) { + throw new ParsingException(parser.getTokenLocation(), "No query registered for [" + queryName + "]"); } + return queryParser.fromXContent(this); } - public Query parseInnerFilter(String queryName) throws IOException, ParsingException { - final boolean originalIsFilter = isFilter; - try { - isFilter = true; - QueryParser queryParser = indexQueryParser.queryParser(queryName); - if (queryParser == null) { - throw new ParsingException(this, "No query registered for [" + queryName + "]"); - } - return queryParser.parse(this); - } finally { - isFilter = originalIsFilter; - } + public ParseFieldMatcher parseFieldMatcher() { + return parseFieldMatcher; } - public Collection simpleMatchToIndexNames(String pattern) { - return indexQueryParser.mapperService.simpleMatchToIndexNames(pattern, getTypes()); - } - - public MappedFieldType fieldMapper(String name) { - return failIfFieldMappingNotFound(name, indexQueryParser.mapperService.smartNameFieldType(name, getTypes())); - } - - public ObjectMapper getObjectMapper(String name) { - return 
indexQueryParser.mapperService.getObjectMapper(name, getTypes()); - } - - /** Gets the search analyzer for the given field, or the default if there is none present for the field - * TODO: remove this by moving defaults into mappers themselves - */ - public Analyzer getSearchAnalyzer(MappedFieldType fieldType) { - if (fieldType.searchAnalyzer() != null) { - return fieldType.searchAnalyzer(); - } - return mapperService().searchAnalyzer(); - } - - /** Gets the search quote nalyzer for the given field, or the default if there is none present for the field - * TODO: remove this by moving defaults into mappers themselves - */ - public Analyzer getSearchQuoteAnalyzer(MappedFieldType fieldType) { - if (fieldType.searchQuoteAnalyzer() != null) { - return fieldType.searchQuoteAnalyzer(); - } - return mapperService().searchQuoteAnalyzer(); - } - - public void setAllowUnmappedFields(boolean allowUnmappedFields) { - this.allowUnmappedFields = allowUnmappedFields; - } - - public void setMapUnmappedFieldAsString(boolean mapUnmappedFieldAsString) { - this.mapUnmappedFieldAsString = mapUnmappedFieldAsString; - } - - private MappedFieldType failIfFieldMappingNotFound(String name, MappedFieldType fieldMapping) { - if (allowUnmappedFields) { - return fieldMapping; - } else if (mapUnmappedFieldAsString){ - StringFieldMapper.Builder builder = MapperBuilders.stringField(name); - // it would be better to pass the real index settings, but they are not easily accessible from here... 
- Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, indexQueryParser.getIndexCreatedVersion()).build(); - return builder.build(new Mapper.BuilderContext(settings, new ContentPath(1))).fieldType(); - } else { - Version indexCreatedVersion = indexQueryParser.getIndexCreatedVersion(); - if (fieldMapping == null && indexCreatedVersion.onOrAfter(Version.V_1_4_0_Beta1)) { - throw new ParsingException(this, "Strict field resolution and no field mapping can be found for the field with name [" - + name + "]"); - } else { - return fieldMapping; - } - } + public void parser(XContentParser innerParser) { + this.parser = innerParser; } /** - * Returns the narrowed down explicit types, or, if not set, all types. + * Get the query parser for a specific type of query registered under its name + * @param name the name of the parser to retrieve + * @return the query parser */ - public Collection queryTypes() { - String[] types = getTypes(); - if (types == null || types.length == 0) { - return mapperService().types(); - } - if (types.length == 1 && types[0].equals("_all")) { - return mapperService().types(); - } - return Arrays.asList(types); - } - - private SearchLookup lookup = null; - - public SearchLookup lookup() { - SearchContext current = SearchContext.current(); - if (current != null) { - return current.lookup(); - } - if (lookup == null) { - lookup = new SearchLookup(mapperService(), indexQueryParser.fieldDataService, null); - } - return lookup; - } - - public long nowInMillis() { - SearchContext current = SearchContext.current(); - if (current != null) { - return current.nowInMillis(); - } - return System.currentTimeMillis(); - } - - public NestedScope nestedScope() { - return nestedScope; - } - - /** - * Return whether the setting is deprecated. 
- */ - public boolean isDeprecatedSetting(String setting) { - return parseFieldMatcher.match(setting, CACHE) || parseFieldMatcher.match(setting, CACHE_KEY); - } - - public Version indexVersionCreated() { - return indexVersionCreated; + private QueryParser queryParser(String name) { + return indicesQueriesRegistry.queryParsers().get(name); } } diff --git a/core/src/main/java/org/elasticsearch/index/query/QueryParser.java b/core/src/main/java/org/elasticsearch/index/query/QueryParser.java index 9553d93bce2..0a3f6d6147c 100644 --- a/core/src/main/java/org/elasticsearch/index/query/QueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/QueryParser.java @@ -19,16 +19,13 @@ package org.elasticsearch.index.query; -import org.apache.lucene.search.Query; -import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.ParsingException; - import java.io.IOException; /** - * + * Defines a query parser that is able to read and parse a query object in {@link org.elasticsearch.common.xcontent.XContent} + * format and create an internal object representing the query, implementing {@link QueryBuilder}, which can be streamed to other nodes. */ -public interface QueryParser { +public interface QueryParser> { /** * The names this query parser is registered under. @@ -36,11 +33,19 @@ public interface QueryParser { String[] names(); /** - * Parses the into a query from the current parser location. Will be at "START_OBJECT" location, - * and should end when the token is at the matching "END_OBJECT". - *

- * Returns null if this query should be ignored in the context of the DSL. + * Creates a new {@link QueryBuilder} from the query held by the {@link QueryShardContext} + * in {@link org.elasticsearch.common.xcontent.XContent} format + * + * @param parseContext + * the input parse context. The state on the parser contained in + * this context will be changed as a side effect of this method + * call + * @return the new QueryBuilder */ - @Nullable - Query parse(QueryParseContext parseContext) throws IOException, ParsingException; + QB fromXContent(QueryParseContext parseContext) throws IOException; + + /** + * @return an empty {@link QueryBuilder} instance for this parser that can be used for deserialization + */ + QB getBuilderPrototype(); } diff --git a/core/src/main/java/org/elasticsearch/index/query/QueryShardContext.java b/core/src/main/java/org/elasticsearch/index/query/QueryShardContext.java new file mode 100644 index 00000000000..23fc2bb864c --- /dev/null +++ b/core/src/main/java/org/elasticsearch/index/query/QueryShardContext.java @@ -0,0 +1,343 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.query; + +import com.google.common.collect.ImmutableMap; +import org.apache.lucene.analysis.Analyzer; +import org.apache.lucene.queryparser.classic.MapperQueryParser; +import org.apache.lucene.queryparser.classic.QueryParserSettings; +import org.apache.lucene.search.Filter; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.join.BitSetProducer; +import org.apache.lucene.search.similarities.Similarity; +import org.elasticsearch.Version; +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.ParseFieldMatcher; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.Index; +import org.elasticsearch.index.analysis.AnalysisService; +import org.elasticsearch.index.fielddata.IndexFieldData; +import org.elasticsearch.index.mapper.*; +import org.elasticsearch.index.mapper.core.StringFieldMapper; +import org.elasticsearch.index.mapper.object.ObjectMapper; +import org.elasticsearch.index.query.support.NestedScope; +import org.elasticsearch.script.ExecutableScript; +import org.elasticsearch.script.ScriptContext; +import org.elasticsearch.script.ScriptService; +import org.elasticsearch.script.Template; +import org.elasticsearch.search.fetch.innerhits.InnerHitsContext; +import org.elasticsearch.search.internal.SearchContext; +import org.elasticsearch.search.lookup.SearchLookup; + +import java.util.Arrays; +import java.util.Collection; +import java.util.HashMap; +import java.util.Map; + +/** + * Context object used to create lucene queries on the shard level. 
+ */ +public class QueryShardContext { + + private static ThreadLocal typesContext = new ThreadLocal<>(); + + public static void setTypes(String[] types) { + typesContext.set(types); + } + + public static String[] getTypes() { + return typesContext.get(); + } + + public static String[] setTypesWithPrevious(String[] types) { + String[] old = typesContext.get(); + setTypes(types); + return old; + } + + public static void removeTypes() { + typesContext.remove(); + } + + private final Index index; + + private final Version indexVersionCreated; + + private final IndexQueryParserService indexQueryParser; + + private final Map namedQueries = new HashMap<>(); + + private final MapperQueryParser queryParser = new MapperQueryParser(this); + + private boolean allowUnmappedFields; + + private boolean mapUnmappedFieldAsString; + + private NestedScope nestedScope; + + private QueryParseContext parseContext; + + boolean isFilter; + + public QueryShardContext(Index index, IndexQueryParserService indexQueryParser) { + this.index = index; + this.indexVersionCreated = Version.indexCreated(indexQueryParser.indexSettings()); + this.indexQueryParser = indexQueryParser; + this.parseContext = new QueryParseContext(indexQueryParser.indicesQueriesRegistry()); + } + + public void parseFieldMatcher(ParseFieldMatcher parseFieldMatcher) { + this.parseContext.parseFieldMatcher(parseFieldMatcher); + } + + public ParseFieldMatcher parseFieldMatcher() { + return parseContext.parseFieldMatcher(); + } + + public void reset() { + allowUnmappedFields = indexQueryParser.defaultAllowUnmappedFields(); + this.parseFieldMatcher(ParseFieldMatcher.EMPTY); + this.lookup = null; + this.namedQueries.clear(); + this.nestedScope = new NestedScope(); + } + + public void reset(XContentParser jp) { + this.reset(); + this.parseContext.reset(jp); + } + + public Index index() { + return this.index; + } + + public IndexQueryParserService indexQueryParserService() { + return indexQueryParser; + } + + public 
AnalysisService analysisService() { + return indexQueryParser.analysisService; + } + + public ScriptService scriptService() { + return indexQueryParser.scriptService; + } + + public MapperService mapperService() { + return indexQueryParser.mapperService; + } + + public Similarity searchSimilarity() { + return indexQueryParser.similarityService != null ? indexQueryParser.similarityService.similarity() : null; + } + + public String defaultField() { + return indexQueryParser.defaultField(); + } + + public boolean queryStringLenient() { + return indexQueryParser.queryStringLenient(); + } + + public boolean queryStringAnalyzeWildcard() { + return indexQueryParser.queryStringAnalyzeWildcard(); + } + + public boolean queryStringAllowLeadingWildcard() { + return indexQueryParser.queryStringAllowLeadingWildcard(); + } + + public MapperQueryParser queryParser(QueryParserSettings settings) { + queryParser.reset(settings); + return queryParser; + } + + public BitSetProducer bitsetFilter(Filter filter) { + return indexQueryParser.bitsetFilterCache.getBitSetProducer(filter); + } + + public > IFD getForField(MappedFieldType mapper) { + return indexQueryParser.fieldDataService.getForField(mapper); + } + + public void addNamedQuery(String name, Query query) { + if (query != null) { + namedQueries.put(name, query); + } + } + + public ImmutableMap copyNamedQueries() { + return ImmutableMap.copyOf(namedQueries); + } + + public void combineNamedQueries(QueryShardContext context) { + namedQueries.putAll(context.namedQueries); + } + + /** + * Return whether we are currently parsing a filter or a query. 
+ */ + public boolean isFilter() { + return isFilter; + } + + public void addInnerHits(String name, InnerHitsContext.BaseInnerHits context) { + SearchContext sc = SearchContext.current(); + if (sc == null) { + throw new QueryShardException(this, "inner_hits unsupported"); + } + + InnerHitsContext innerHitsContext; + if (sc.innerHits() == null) { + innerHitsContext = new InnerHitsContext(new HashMap()); + sc.innerHits(innerHitsContext); + } else { + innerHitsContext = sc.innerHits(); + } + innerHitsContext.addInnerHitDefinition(name, context); + } + + public Collection simpleMatchToIndexNames(String pattern) { + return indexQueryParser.mapperService.simpleMatchToIndexNames(pattern); + } + + public MappedFieldType fieldMapper(String name) { + return failIfFieldMappingNotFound(name, indexQueryParser.mapperService.smartNameFieldType(name, getTypes())); + } + + public ObjectMapper getObjectMapper(String name) { + return indexQueryParser.mapperService.getObjectMapper(name, getTypes()); + } + + /** + * Gets the search analyzer for the given field, or the default if there is none present for the field + * TODO: remove this by moving defaults into mappers themselves + */ + public Analyzer getSearchAnalyzer(MappedFieldType fieldType) { + if (fieldType.searchAnalyzer() != null) { + return fieldType.searchAnalyzer(); + } + return mapperService().searchAnalyzer(); + } + + /** + * Gets the search quote analyzer for the given field, or the default if there is none present for the field + * TODO: remove this by moving defaults into mappers themselves + */ + public Analyzer getSearchQuoteAnalyzer(MappedFieldType fieldType) { + if (fieldType.searchQuoteAnalyzer() != null) { + return fieldType.searchQuoteAnalyzer(); + } + return mapperService().searchQuoteAnalyzer(); + } + + public void setAllowUnmappedFields(boolean allowUnmappedFields) { + this.allowUnmappedFields = allowUnmappedFields; + } + + public void setMapUnmappedFieldAsString(boolean mapUnmappedFieldAsString) { + 
this.mapUnmappedFieldAsString = mapUnmappedFieldAsString; + } + + private MappedFieldType failIfFieldMappingNotFound(String name, MappedFieldType fieldMapping) { + if (allowUnmappedFields) { + return fieldMapping; + } else if (mapUnmappedFieldAsString) { + StringFieldMapper.Builder builder = MapperBuilders.stringField(name); + // it would be better to pass the real index settings, but they are not easily accessible from here... + Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, indexQueryParser.getIndexCreatedVersion()).build(); + return builder.build(new Mapper.BuilderContext(settings, new ContentPath(1))).fieldType(); + } else { + Version indexCreatedVersion = indexQueryParser.getIndexCreatedVersion(); + if (fieldMapping == null && indexCreatedVersion.onOrAfter(Version.V_1_4_0_Beta1)) { + throw new QueryShardException(this, "Strict field resolution and no field mapping can be found for the field with name [" + + name + "]"); + } else { + return fieldMapping; + } + } + } + + /** + * Returns the narrowed down explicit types, or, if not set, all types. 
+ */ + public Collection queryTypes() { + String[] types = getTypes(); + if (types == null || types.length == 0) { + return mapperService().types(); + } + if (types.length == 1 && types[0].equals("_all")) { + return mapperService().types(); + } + return Arrays.asList(types); + } + + private SearchLookup lookup = null; + + public SearchLookup lookup() { + SearchContext current = SearchContext.current(); + if (current != null) { + return current.lookup(); + } + if (lookup == null) { + lookup = new SearchLookup(mapperService(), indexQueryParser.fieldDataService, null); + } + return lookup; + } + + public long nowInMillis() { + SearchContext current = SearchContext.current(); + if (current != null) { + return current.nowInMillis(); + } + return System.currentTimeMillis(); + } + + public NestedScope nestedScope() { + return nestedScope; + } + + public Version indexVersionCreated() { + return indexVersionCreated; + } + + public QueryParseContext parseContext() { + return this.parseContext; + } + + public boolean matchesIndices(String... indices) { + return this.indexQueryParser.matchesIndices(indices); + } + + /* + * Executes the given template, and returns the response. + */ + public BytesReference executeQueryTemplate(Template template, SearchContext searchContext) { + ExecutableScript executable = scriptService().executable(template, ScriptContext.Standard.SEARCH, searchContext); + return (BytesReference) executable.run(); + } + + public Client getClient() { + return indexQueryParser.getClient(); + } +} diff --git a/core/src/main/java/org/elasticsearch/index/query/QueryShardException.java b/core/src/main/java/org/elasticsearch/index/query/QueryShardException.java new file mode 100644 index 00000000000..1e31c7c50e1 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/index/query/QueryShardException.java @@ -0,0 +1,72 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.query;

import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.index.Index;
import org.elasticsearch.rest.RestStatus;

import java.io.IOException;

/**
 * Exception that is thrown when creating lucene queries on the shard.
 * Always reported to clients as HTTP 400 (bad request), and tagged with the
 * index the failure occurred on.
 */
public class QueryShardException extends ElasticsearchException {

    public QueryShardException(QueryShardContext context, String msg, Object... args) {
        this(context, msg, null, args);
    }

    /**
     * @param context the shard context the failure occurred in; only its index is recorded
     * @param msg     failure message; {@code args} are applied to it by the superclass
     * @param cause   underlying cause, may be null
     */
    public QueryShardException(QueryShardContext context, String msg, Throwable cause, Object... args) {
        super(msg, cause, args);
        setIndex(context.index());
    }

    /**
     * This constructor is provided for use in unit tests where a
     * {@link QueryShardContext} may not be available
     */
    public QueryShardException(Index index, String msg, Throwable cause) {
        super(msg, cause);
        setIndex(index);
    }

    /** Stream (wire) deserialization constructor. */
    public QueryShardException(StreamInput in) throws IOException {
        super(in);
    }

    @Override
    public RestStatus status() {
        return RestStatus.BAD_REQUEST;
    }

    // NOTE(review): the original also overrode innerToXContent(XContentBuilder, Params)
    // and writeTo(StreamOutput) with bodies that only delegated to super; those
    // redundant overrides were removed -- inherited behavior is identical.
}
org.joda.time.DateTimeZone; import java.io.IOException; -import java.util.ArrayList; -import java.util.List; import java.util.Locale; +import java.util.Map; +import java.util.Objects; +import java.util.TreeMap; /** * A query that parses a query string and runs it. There are two modes that this operates. The first, @@ -35,72 +48,96 @@ import java.util.Locale; * (using {@link #field(String)}), will run the parsed query against the provided fields, and combine * them either using DisMax or a plain boolean query (see {@link #useDisMax(boolean)}). */ -public class QueryStringQueryBuilder extends QueryBuilder implements BoostableQueryBuilder { +public class QueryStringQueryBuilder extends AbstractQueryBuilder { - public enum Operator { - OR, - AND - } + public static final String NAME = "query_string"; + + public static final boolean DEFAULT_AUTO_GENERATE_PHRASE_QUERIES = false; + public static final int DEFAULT_MAX_DETERMINED_STATES = Operations.DEFAULT_MAX_DETERMINIZED_STATES; + public static final boolean DEFAULT_LOWERCASE_EXPANDED_TERMS = true; + public static final boolean DEFAULT_ENABLE_POSITION_INCREMENTS = true; + public static final boolean DEFAULT_ESCAPE = false; + public static final boolean DEFAULT_USE_DIS_MAX = true; + public static final int DEFAULT_FUZZY_PREFIX_LENGTH = FuzzyQuery.defaultPrefixLength; + public static final int DEFAULT_FUZZY_MAX_EXPANSIONS = FuzzyQuery.defaultMaxExpansions; + public static final int DEFAULT_PHRASE_SLOP = 0; + public static final float DEFAULT_TIE_BREAKER = 0.0f; + public static final Fuzziness DEFAULT_FUZZINESS = Fuzziness.AUTO; + public static final Operator DEFAULT_OPERATOR = Operator.OR; + public static final Locale DEFAULT_LOCALE = Locale.ROOT; + + static final QueryStringQueryBuilder PROTOTYPE = new QueryStringQueryBuilder(""); private final String queryString; private String defaultField; + /** + * Fields to query against. If left empty will query default field, + * currently _ALL. 
Uses a TreeMap to hold the fields so boolean clauses are + * always sorted in same order for generated Lucene query for easier + * testing. + * + * Can be changed back to HashMap once https://issues.apache.org/jira/browse/LUCENE-6305 is fixed. + */ + private final Map fieldsAndWeights = new TreeMap<>(); - private Operator defaultOperator; + private Operator defaultOperator = DEFAULT_OPERATOR; private String analyzer; private String quoteAnalyzer; private String quoteFieldSuffix; - private Boolean autoGeneratePhraseQueries; + private boolean autoGeneratePhraseQueries = DEFAULT_AUTO_GENERATE_PHRASE_QUERIES; private Boolean allowLeadingWildcard; - private Boolean lowercaseExpandedTerms; - - private Boolean enablePositionIncrements; - private Boolean analyzeWildcard; - private Locale locale; + private boolean lowercaseExpandedTerms = DEFAULT_LOWERCASE_EXPANDED_TERMS; - private float boost = -1; + private boolean enablePositionIncrements = DEFAULT_ENABLE_POSITION_INCREMENTS; + + private Locale locale = DEFAULT_LOCALE; + + private Fuzziness fuzziness = DEFAULT_FUZZINESS; + + private int fuzzyPrefixLength = DEFAULT_FUZZY_PREFIX_LENGTH; + + private int fuzzyMaxExpansions = DEFAULT_FUZZY_MAX_EXPANSIONS; + + private String rewrite; - private Fuzziness fuzziness; - private int fuzzyPrefixLength = -1; - private int fuzzyMaxExpansions = -1; private String fuzzyRewrite; - private int phraseSlop = -1; + private boolean escape = DEFAULT_ESCAPE; - private List fields; + private int phraseSlop = DEFAULT_PHRASE_SLOP; - private ObjectFloatHashMap fieldsBoosts; + private boolean useDisMax = DEFAULT_USE_DIS_MAX; - private Boolean useDisMax; - - private float tieBreaker = -1; - - private String rewrite = null; + private float tieBreaker = DEFAULT_TIE_BREAKER; private String minimumShouldMatch; private Boolean lenient; - private String queryName; - - private String timeZone; + private DateTimeZone timeZone; /** To limit effort spent determinizing regexp queries. 
*/ - private Integer maxDeterminizedStates; - - private Boolean escape; + private int maxDeterminizedStates = DEFAULT_MAX_DETERMINED_STATES; public QueryStringQueryBuilder(String queryString) { + if (queryString == null) { + throw new IllegalArgumentException("query text missing"); + } this.queryString = queryString; } + public String queryString() { + return this.queryString; + } + /** * The default field to run against when no prefix field is specified. Only relevant when * not explicitly adding fields the query string will run against. @@ -110,14 +147,16 @@ public class QueryStringQueryBuilder extends QueryBuilder implements BoostableQu return this; } + public String defaultField() { + return this.defaultField; + } + /** - * Adds a field to run the query string against. + * Adds a field to run the query string against. The field will be associated with the default boost of {@link AbstractQueryBuilder#DEFAULT_BOOST}. + * Use {@link #field(String, float)} to set a specific boost for the field. */ public QueryStringQueryBuilder field(String field) { - if (fields == null) { - fields = new ArrayList<>(); - } - fields.add(field); + this.fieldsAndWeights.put(field, AbstractQueryBuilder.DEFAULT_BOOST); return this; } @@ -125,17 +164,23 @@ public class QueryStringQueryBuilder extends QueryBuilder implements BoostableQu * Adds a field to run the query string against with a specific boost. */ public QueryStringQueryBuilder field(String field, float boost) { - if (fields == null) { - fields = new ArrayList<>(); - } - fields.add(field); - if (fieldsBoosts == null) { - fieldsBoosts = new ObjectFloatHashMap<>(); - } - fieldsBoosts.put(field, boost); + this.fieldsAndWeights.put(field, boost); return this; } + /** + * Add several fields to run the query against with a specific boost. 
+ */ + public QueryStringQueryBuilder fields(Map fields) { + this.fieldsAndWeights.putAll(fields); + return this; + } + + /** Returns the fields including their respective boosts to run the query against. */ + public Map fields() { + return this.fieldsAndWeights; + } + /** * When more than one field is used with the query string, should queries be combined using * dis max, or boolean query. Defaults to dis max (true). @@ -145,6 +190,10 @@ public class QueryStringQueryBuilder extends QueryBuilder implements BoostableQu return this; } + public boolean useDisMax() { + return this.useDisMax; + } + /** * When more than one field is used with the query string, and combined queries are using * dis max, control the tie breaker for it. @@ -154,6 +203,10 @@ public class QueryStringQueryBuilder extends QueryBuilder implements BoostableQu return this; } + public float tieBreaker() { + return this.tieBreaker; + } + /** * Sets the boolean operator of the query parser used to parse the query string. *

@@ -165,10 +218,14 @@ public class QueryStringQueryBuilder extends QueryBuilder implements BoostableQu * above mentioned query is parsed as capital AND of AND Hungary */ public QueryStringQueryBuilder defaultOperator(Operator defaultOperator) { - this.defaultOperator = defaultOperator; + this.defaultOperator = defaultOperator == null ? DEFAULT_OPERATOR : defaultOperator; return this; } + public Operator defaultOperator() { + return this.defaultOperator; + } + /** * The optional analyzer used to analyze the query string. Note, if a field has search analyzer * defined for it, then it will be used automatically. Defaults to the smart search analyzer. @@ -182,12 +239,11 @@ public class QueryStringQueryBuilder extends QueryBuilder implements BoostableQu * The optional analyzer used to analyze the query string for phrase searches. Note, if a field has search (quote) analyzer * defined for it, then it will be used automatically. Defaults to the smart search analyzer. */ - public QueryStringQueryBuilder quoteAnalyzer(String analyzer) { - this.quoteAnalyzer = analyzer; + public QueryStringQueryBuilder quoteAnalyzer(String quoteAnalyzer) { + this.quoteAnalyzer = quoteAnalyzer; return this; } - /** * Set to true if phrase queries will be automatically generated * when the analyzer returns more than one term from whitespace @@ -202,6 +258,10 @@ public class QueryStringQueryBuilder extends QueryBuilder implements BoostableQu return this; } + public boolean autoGeneratePhraseQueries() { + return this.autoGeneratePhraseQueries; + } + /** * Protects against too-difficult regular expression queries. */ @@ -210,14 +270,22 @@ public class QueryStringQueryBuilder extends QueryBuilder implements BoostableQu return this; } + public int maxDeterminizedStates() { + return this.maxDeterminizedStates; + } + /** * Should leading wildcards be allowed or not. Defaults to true. 
*/ - public QueryStringQueryBuilder allowLeadingWildcard(boolean allowLeadingWildcard) { + public QueryStringQueryBuilder allowLeadingWildcard(Boolean allowLeadingWildcard) { this.allowLeadingWildcard = allowLeadingWildcard; return this; } + public Boolean allowLeadingWildcard() { + return this.allowLeadingWildcard; + } + /** * Whether terms of wildcard, prefix, fuzzy and range queries are to be automatically * lower-cased or not. Default is true. @@ -227,6 +295,10 @@ public class QueryStringQueryBuilder extends QueryBuilder implements BoostableQu return this; } + public boolean lowercaseExpandedTerms() { + return this.lowercaseExpandedTerms; + } + /** * Set to true to enable position increments in result query. Defaults to * true. @@ -239,14 +311,22 @@ public class QueryStringQueryBuilder extends QueryBuilder implements BoostableQu return this; } + public boolean enablePositionIncrements() { + return this.enablePositionIncrements; + } + /** * Set the edit distance for fuzzy queries. Default is "AUTO". */ public QueryStringQueryBuilder fuzziness(Fuzziness fuzziness) { - this.fuzziness = fuzziness; + this.fuzziness = fuzziness == null ? DEFAULT_FUZZINESS : fuzziness; return this; } + public Fuzziness fuzziness() { + return this.fuzziness; + } + /** * Set the minimum prefix length for fuzzy queries. Default is 1. */ @@ -255,16 +335,28 @@ public class QueryStringQueryBuilder extends QueryBuilder implements BoostableQu return this; } + public int fuzzyPrefixLength() { + return fuzzyPrefixLength; + } + public QueryStringQueryBuilder fuzzyMaxExpansions(int fuzzyMaxExpansions) { this.fuzzyMaxExpansions = fuzzyMaxExpansions; return this; } + public int fuzzyMaxExpansions() { + return fuzzyMaxExpansions; + } + public QueryStringQueryBuilder fuzzyRewrite(String fuzzyRewrite) { this.fuzzyRewrite = fuzzyRewrite; return this; } + public String fuzzyRewrite() { + return fuzzyRewrite; + } + /** * Sets the default slop for phrases. 
If zero, then exact phrase matches * are required. Default value is zero. @@ -274,32 +366,38 @@ public class QueryStringQueryBuilder extends QueryBuilder implements BoostableQu return this; } + public int phraseSlop() { + return phraseSlop; + } + /** * Set to true to enable analysis on wildcard and prefix queries. */ - public QueryStringQueryBuilder analyzeWildcard(boolean analyzeWildcard) { + public QueryStringQueryBuilder analyzeWildcard(Boolean analyzeWildcard) { this.analyzeWildcard = analyzeWildcard; return this; } + public Boolean analyzeWildcard() { + return this.analyzeWildcard; + } + public QueryStringQueryBuilder rewrite(String rewrite) { this.rewrite = rewrite; return this; } + public String rewrite() { + return this.rewrite; + } + public QueryStringQueryBuilder minimumShouldMatch(String minimumShouldMatch) { this.minimumShouldMatch = minimumShouldMatch; return this; } - /** - * Sets the boost for this query. Documents matching this query will (in addition to the normal - * weightings) have their score multiplied by the boost provided. - */ - @Override - public QueryStringQueryBuilder boost(float boost) { - this.boost = boost; - return this; + public String minimumShouldMatch() { + return this.minimumShouldMatch; } /** @@ -310,6 +408,10 @@ public class QueryStringQueryBuilder extends QueryBuilder implements BoostableQu return this; } + public String quoteFieldSuffix() { + return this.quoteFieldSuffix; + } + /** * Sets the query string parser to be lenient when parsing field values, defaults to the index * setting and if not set, defaults to false. @@ -319,27 +421,40 @@ public class QueryStringQueryBuilder extends QueryBuilder implements BoostableQu return this; } - /** - * Sets the query name for the filter that can be used when searching for matched_filters per hit. 
- */ - public QueryStringQueryBuilder queryName(String queryName) { - this.queryName = queryName; - return this; + public Boolean lenient() { + return this.lenient; } public QueryStringQueryBuilder locale(Locale locale) { - this.locale = locale; + this.locale = locale == null ? DEFAULT_LOCALE : locale; return this; } + public Locale locale() { + return this.locale; + } + /** * In case of date field, we can adjust the from/to fields using a timezone */ public QueryStringQueryBuilder timeZone(String timeZone) { + if (timeZone != null) { + this.timeZone = DateTimeZone.forID(timeZone); + } else { + this.timeZone = null; + } + return this; + } + + public QueryStringQueryBuilder timeZone(DateTimeZone timeZone) { this.timeZone = timeZone; return this; } + public DateTimeZone timeZone() { + return this.timeZone; + } + /** * Set to true to enable escaping of the query string */ @@ -348,98 +463,275 @@ public class QueryStringQueryBuilder extends QueryBuilder implements BoostableQu return this; } + public boolean escape() { + return this.escape; + } + @Override protected void doXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(QueryStringQueryParser.NAME); - builder.field("query", queryString); - if (defaultField != null) { - builder.field("default_field", defaultField); + builder.startObject(NAME); + builder.field("query", this.queryString); + if (this.defaultField != null) { + builder.field("default_field", this.defaultField); } - if (fields != null) { - builder.startArray("fields"); - for (String field : fields) { - if (fieldsBoosts != null && fieldsBoosts.containsKey(field)) { - field += "^" + fieldsBoosts.get(field); - } - builder.value(field); - } - builder.endArray(); + builder.startArray("fields"); + for (Map.Entry fieldEntry : this.fieldsAndWeights.entrySet()) { + builder.value(fieldEntry.getKey() + "^" + fieldEntry.getValue()); } - if (useDisMax != null) { - builder.field("use_dis_max", useDisMax); + builder.endArray(); + 
builder.field("use_dis_max", this.useDisMax); + builder.field("tie_breaker", this.tieBreaker); + builder.field("default_operator", this.defaultOperator.name().toLowerCase(Locale.ROOT)); + if (this.analyzer != null) { + builder.field("analyzer", this.analyzer); } - if (tieBreaker != -1) { - builder.field("tie_breaker", tieBreaker); + if (this.quoteAnalyzer != null) { + builder.field("quote_analyzer", this.quoteAnalyzer); } - if (defaultOperator != null) { - builder.field("default_operator", defaultOperator.name().toLowerCase(Locale.ROOT)); + builder.field("auto_generate_phrase_queries", this.autoGeneratePhraseQueries); + builder.field("max_determinized_states", this.maxDeterminizedStates); + if (this.allowLeadingWildcard != null) { + builder.field("allow_leading_wildcard", this.allowLeadingWildcard); } - if (analyzer != null) { - builder.field("analyzer", analyzer); + builder.field("lowercase_expanded_terms", this.lowercaseExpandedTerms); + builder.field("enable_position_increments", this.enablePositionIncrements); + this.fuzziness.toXContent(builder, params); + builder.field("fuzzy_prefix_length", this.fuzzyPrefixLength); + builder.field("fuzzy_max_expansions", this.fuzzyMaxExpansions); + if (this.fuzzyRewrite != null) { + builder.field("fuzzy_rewrite", this.fuzzyRewrite); } - if (quoteAnalyzer != null) { - builder.field("quote_analyzer", quoteAnalyzer); + builder.field("phrase_slop", this.phraseSlop); + if (this.analyzeWildcard != null) { + builder.field("analyze_wildcard", this.analyzeWildcard); } - if (autoGeneratePhraseQueries != null) { - builder.field("auto_generate_phrase_queries", autoGeneratePhraseQueries); + if (this.rewrite != null) { + builder.field("rewrite", this.rewrite); } - if (maxDeterminizedStates != null) { - builder.field("max_determinized_states", maxDeterminizedStates); + if (this.minimumShouldMatch != null) { + builder.field("minimum_should_match", this.minimumShouldMatch); } - if (allowLeadingWildcard != null) { - 
builder.field("allow_leading_wildcard", allowLeadingWildcard); + if (this.quoteFieldSuffix != null) { + builder.field("quote_field_suffix", this.quoteFieldSuffix); } - if (lowercaseExpandedTerms != null) { - builder.field("lowercase_expanded_terms", lowercaseExpandedTerms); + if (this.lenient != null) { + builder.field("lenient", this.lenient); } - if (enablePositionIncrements != null) { - builder.field("enable_position_increments", enablePositionIncrements); - } - if (fuzziness != null) { - fuzziness.toXContent(builder, params); - } - if (boost != -1) { - builder.field("boost", boost); - } - if (fuzzyPrefixLength != -1) { - builder.field("fuzzy_prefix_length", fuzzyPrefixLength); - } - if (fuzzyMaxExpansions != -1) { - builder.field("fuzzy_max_expansions", fuzzyMaxExpansions); - } - if (fuzzyRewrite != null) { - builder.field("fuzzy_rewrite", fuzzyRewrite); - } - if (phraseSlop != -1) { - builder.field("phrase_slop", phraseSlop); - } - if (analyzeWildcard != null) { - builder.field("analyze_wildcard", analyzeWildcard); - } - if (rewrite != null) { - builder.field("rewrite", rewrite); - } - if (minimumShouldMatch != null) { - builder.field("minimum_should_match", minimumShouldMatch); - } - if (quoteFieldSuffix != null) { - builder.field("quote_field_suffix", quoteFieldSuffix); - } - if (lenient != null) { - builder.field("lenient", lenient); - } - if (queryName != null) { - builder.field("_name", queryName); - } - if (locale != null) { - builder.field("locale", locale.toString()); - } - if (timeZone != null) { - builder.field("time_zone", timeZone); - } - if (escape != null) { - builder.field("escape", escape); + builder.field("locale", this.locale.toLanguageTag()); + if (this.timeZone != null) { + builder.field("time_zone", this.timeZone.getID()); } + builder.field("escape", this.escape); + printBoostAndQueryName(builder); builder.endObject(); } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + protected QueryStringQueryBuilder 
doReadFrom(StreamInput in) throws IOException { + QueryStringQueryBuilder queryStringQueryBuilder = new QueryStringQueryBuilder(in.readString()); + queryStringQueryBuilder.defaultField = in.readOptionalString(); + int size = in.readVInt(); + for (int i = 0; i < size; i++) { + queryStringQueryBuilder.fieldsAndWeights.put(in.readString(), in.readFloat()); + } + queryStringQueryBuilder.defaultOperator = Operator.readOperatorFrom(in); + queryStringQueryBuilder.analyzer = in.readOptionalString(); + queryStringQueryBuilder.quoteAnalyzer = in.readOptionalString(); + queryStringQueryBuilder.quoteFieldSuffix = in.readOptionalString(); + queryStringQueryBuilder.autoGeneratePhraseQueries = in.readBoolean(); + queryStringQueryBuilder.allowLeadingWildcard = in.readOptionalBoolean(); + queryStringQueryBuilder.analyzeWildcard = in.readOptionalBoolean(); + queryStringQueryBuilder.lowercaseExpandedTerms = in.readBoolean(); + queryStringQueryBuilder.enablePositionIncrements = in.readBoolean(); + queryStringQueryBuilder.locale = Locale.forLanguageTag(in.readString()); + queryStringQueryBuilder.fuzziness = Fuzziness.readFuzzinessFrom(in); + queryStringQueryBuilder.fuzzyPrefixLength = in.readVInt(); + queryStringQueryBuilder.fuzzyMaxExpansions = in.readVInt(); + queryStringQueryBuilder.fuzzyRewrite = in.readOptionalString(); + queryStringQueryBuilder.phraseSlop = in.readVInt(); + queryStringQueryBuilder.useDisMax = in.readBoolean(); + queryStringQueryBuilder.tieBreaker = in.readFloat(); + queryStringQueryBuilder.rewrite = in.readOptionalString(); + queryStringQueryBuilder.minimumShouldMatch = in.readOptionalString(); + queryStringQueryBuilder.lenient = in.readOptionalBoolean(); + if (in.readBoolean()) { + queryStringQueryBuilder.timeZone = DateTimeZone.forID(in.readString()); + } + queryStringQueryBuilder.escape = in.readBoolean(); + queryStringQueryBuilder.maxDeterminizedStates = in.readVInt(); + return queryStringQueryBuilder; + } + + @Override + protected void doWriteTo(StreamOutput 
out) throws IOException { + out.writeString(this.queryString); + out.writeOptionalString(this.defaultField); + out.writeVInt(this.fieldsAndWeights.size()); + for (Map.Entry fieldsEntry : this.fieldsAndWeights.entrySet()) { + out.writeString(fieldsEntry.getKey()); + out.writeFloat(fieldsEntry.getValue()); + } + this.defaultOperator.writeTo(out); + out.writeOptionalString(this.analyzer); + out.writeOptionalString(this.quoteAnalyzer); + out.writeOptionalString(this.quoteFieldSuffix); + out.writeBoolean(this.autoGeneratePhraseQueries); + out.writeOptionalBoolean(this.allowLeadingWildcard); + out.writeOptionalBoolean(this.analyzeWildcard); + out.writeBoolean(this.lowercaseExpandedTerms); + out.writeBoolean(this.enablePositionIncrements); + out.writeString(this.locale.toLanguageTag()); + this.fuzziness.writeTo(out); + out.writeVInt(this.fuzzyPrefixLength); + out.writeVInt(this.fuzzyMaxExpansions); + out.writeOptionalString(this.fuzzyRewrite); + out.writeVInt(this.phraseSlop); + out.writeBoolean(this.useDisMax); + out.writeFloat(this.tieBreaker); + out.writeOptionalString(this.rewrite); + out.writeOptionalString(this.minimumShouldMatch); + out.writeOptionalBoolean(this.lenient); + if (this.timeZone == null) { + out.writeBoolean(false); + } else { + out.writeBoolean(true); + out.writeString(this.timeZone.getID()); + } + out.writeBoolean(this.escape); + out.writeVInt(this.maxDeterminizedStates); + } + + @Override + protected boolean doEquals(QueryStringQueryBuilder other) { + return Objects.equals(queryString, other.queryString) && + Objects.equals(defaultField, other.defaultField) && + Objects.equals(fieldsAndWeights, other.fieldsAndWeights) && + Objects.equals(defaultOperator, other.defaultOperator) && + Objects.equals(analyzer, other.analyzer) && + Objects.equals(quoteAnalyzer, other.quoteAnalyzer) && + Objects.equals(quoteFieldSuffix, other.quoteFieldSuffix) && + Objects.equals(autoGeneratePhraseQueries, other.autoGeneratePhraseQueries) && + 
Objects.equals(allowLeadingWildcard, other.allowLeadingWildcard) && + Objects.equals(lowercaseExpandedTerms, other.lowercaseExpandedTerms) && + Objects.equals(enablePositionIncrements, other.enablePositionIncrements) && + Objects.equals(analyzeWildcard, other.analyzeWildcard) && + Objects.equals(locale.toLanguageTag(), other.locale.toLanguageTag()) && + Objects.equals(fuzziness, other.fuzziness) && + Objects.equals(fuzzyPrefixLength, other.fuzzyPrefixLength) && + Objects.equals(fuzzyMaxExpansions, other.fuzzyMaxExpansions) && + Objects.equals(fuzzyRewrite, other.fuzzyRewrite) && + Objects.equals(phraseSlop, other.phraseSlop) && + Objects.equals(useDisMax, other.useDisMax) && + Objects.equals(tieBreaker, other.tieBreaker) && + Objects.equals(rewrite, other.rewrite) && + Objects.equals(minimumShouldMatch, other.minimumShouldMatch) && + Objects.equals(lenient, other.lenient) && + (timeZone == null ? other.timeZone == null : other.timeZone != null && Objects.equals(timeZone.getID(), other.timeZone.getID())) && + Objects.equals(escape, other.escape) && + Objects.equals(maxDeterminizedStates, other.maxDeterminizedStates); + } + + @Override + protected int doHashCode() { + return Objects.hash(queryString, defaultField, fieldsAndWeights, defaultOperator, analyzer, quoteAnalyzer, + quoteFieldSuffix, autoGeneratePhraseQueries, allowLeadingWildcard, lowercaseExpandedTerms, + enablePositionIncrements, analyzeWildcard, locale.toLanguageTag(), fuzziness, fuzzyPrefixLength, + fuzzyMaxExpansions, fuzzyRewrite, phraseSlop, useDisMax, tieBreaker, rewrite, minimumShouldMatch, lenient, + timeZone == null ? 0 : timeZone.getID(), escape, maxDeterminizedStates); + } + + @Override + protected Query doToQuery(QueryShardContext context) throws IOException { + //TODO would be nice to have all the settings in one place: some change though at query execution time + //e.g. field names get expanded to concrete names, defaults get resolved sometimes to settings values etc. 
+ QueryParserSettings qpSettings; + if (this.escape) { + qpSettings = new QueryParserSettings(org.apache.lucene.queryparser.classic.QueryParser.escape(this.queryString)); + } else { + qpSettings = new QueryParserSettings(this.queryString); + } + qpSettings.defaultField(this.defaultField == null ? context.defaultField() : this.defaultField); + Map<String, Float> resolvedFields = new TreeMap<>(); + for (Map.Entry<String, Float> fieldsEntry : fieldsAndWeights.entrySet()) { + String fieldName = fieldsEntry.getKey(); + Float weight = fieldsEntry.getValue(); + if (Regex.isSimpleMatchPattern(fieldName)) { + for (String resolvedFieldName : context.mapperService().simpleMatchToIndexNames(fieldName)) { + resolvedFields.put(resolvedFieldName, weight); + } + } else { + resolvedFields.put(fieldName, weight); + } + } + qpSettings.fieldsAndWeights(resolvedFields); + qpSettings.defaultOperator(defaultOperator.toQueryParserOperator()); + + if (analyzer == null) { + qpSettings.defaultAnalyzer(context.mapperService().searchAnalyzer()); + } else { + NamedAnalyzer namedAnalyzer = context.analysisService().analyzer(analyzer); + if (namedAnalyzer == null) { + throw new QueryShardException(context, "[query_string] analyzer [" + analyzer + "] not found"); + } + qpSettings.forceAnalyzer(namedAnalyzer); + } + if (quoteAnalyzer != null) { + NamedAnalyzer namedAnalyzer = context.analysisService().analyzer(quoteAnalyzer); + if (namedAnalyzer == null) { + throw new QueryShardException(context, "[query_string] quote_analyzer [" + quoteAnalyzer + "] not found"); + } + qpSettings.forceQuoteAnalyzer(namedAnalyzer); + } else if (analyzer != null) { + qpSettings.forceQuoteAnalyzer(qpSettings.analyzer()); + } else { + qpSettings.defaultQuoteAnalyzer(context.mapperService().searchQuoteAnalyzer()); + } + + qpSettings.quoteFieldSuffix(quoteFieldSuffix); + qpSettings.autoGeneratePhraseQueries(autoGeneratePhraseQueries); + qpSettings.allowLeadingWildcard(allowLeadingWildcard == null ? 
context.queryStringAllowLeadingWildcard() : allowLeadingWildcard); + qpSettings.analyzeWildcard(analyzeWildcard == null ? context.queryStringAnalyzeWildcard() : analyzeWildcard); + qpSettings.lowercaseExpandedTerms(lowercaseExpandedTerms); + qpSettings.enablePositionIncrements(enablePositionIncrements); + qpSettings.locale(locale); + qpSettings.fuzziness(fuzziness); + qpSettings.fuzzyPrefixLength(fuzzyPrefixLength); + qpSettings.fuzzyMaxExpansions(fuzzyMaxExpansions); + qpSettings.fuzzyRewriteMethod(QueryParsers.parseRewriteMethod(context.parseFieldMatcher(), this.fuzzyRewrite)); + qpSettings.phraseSlop(phraseSlop); + qpSettings.useDisMax(useDisMax); + qpSettings.tieBreaker(tieBreaker); + qpSettings.rewriteMethod(QueryParsers.parseRewriteMethod(context.parseFieldMatcher(), this.rewrite)); + qpSettings.lenient(lenient == null ? context.queryStringLenient() : lenient); + qpSettings.timeZone(timeZone); + qpSettings.maxDeterminizedStates(maxDeterminizedStates); + + MapperQueryParser queryParser = context.queryParser(qpSettings); + Query query; + try { + query = queryParser.parse(qpSettings.queryString()); + } catch (org.apache.lucene.queryparser.classic.ParseException e) { + throw new QueryShardException(context, "Failed to parse query [" + this.queryString + "]", e); + } + + if (query == null) { + return null; + } + query = Queries.fixNegativeQueryIfNeeded(query); + if (query instanceof BooleanQuery) { + query = Queries.applyMinimumShouldMatch((BooleanQuery) query, this.minimumShouldMatch()); + } + return query; + } + + @Override + protected void setFinalBoost(Query query) { + //we need to preserve the boost that came out of the parsing phase + query.setBoost(query.getBoost() * boost); + } } diff --git a/core/src/main/java/org/elasticsearch/index/query/QueryStringQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/QueryStringQueryParser.java index 3642bc2824f..f5dbb250805 100644 --- a/core/src/main/java/org/elasticsearch/index/query/QueryStringQueryParser.java +++ 
b/core/src/main/java/org/elasticsearch/index/query/QueryStringQueryParser.java @@ -19,67 +19,62 @@ package org.elasticsearch.index.query; -import com.carrotsearch.hppc.ObjectFloatHashMap; -import org.apache.lucene.queryparser.classic.MapperQueryParser; -import org.apache.lucene.queryparser.classic.QueryParserSettings; -import org.apache.lucene.search.BooleanQuery; -import org.apache.lucene.search.Query; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.lucene.search.Queries; -import org.elasticsearch.common.regex.Regex; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.Fuzziness; -import org.elasticsearch.common.util.LocaleUtils; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.analysis.NamedAnalyzer; -import org.elasticsearch.index.query.support.QueryParsers; -import org.joda.time.DateTimeZone; import java.io.IOException; -import java.util.ArrayList; +import java.util.HashMap; import java.util.Locale; - -import static org.elasticsearch.common.lucene.search.Queries.fixNegativeQueryIfNeeded; +import java.util.Map; /** - * + * Parser for query_string query */ public class QueryStringQueryParser implements QueryParser { - public static final String NAME = "query_string"; private static final ParseField FUZZINESS = Fuzziness.FIELD.withDeprecation("fuzzy_min_sim"); - private final boolean defaultAnalyzeWildcard; - private final boolean defaultAllowLeadingWildcard; - - @Inject - public QueryStringQueryParser(Settings settings) { - this.defaultAnalyzeWildcard = settings.getAsBoolean("indices.query.query_string.analyze_wildcard", QueryParserSettings.DEFAULT_ANALYZE_WILDCARD); - this.defaultAllowLeadingWildcard = settings.getAsBoolean("indices.query.query_string.allowLeadingWildcard", 
QueryParserSettings.DEFAULT_ALLOW_LEADING_WILDCARD); - } - @Override public String[] names() { - return new String[]{NAME, Strings.toCamelCase(NAME)}; + return new String[]{QueryStringQueryBuilder.NAME, Strings.toCamelCase(QueryStringQueryBuilder.NAME)}; } @Override - public Query parse(QueryParseContext parseContext) throws IOException, ParsingException { + public QueryBuilder fromXContent(QueryParseContext parseContext) throws IOException { XContentParser parser = parseContext.parser(); - - String queryName = null; - QueryParserSettings qpSettings = new QueryParserSettings(); - qpSettings.defaultField(parseContext.defaultField()); - qpSettings.lenient(parseContext.queryStringLenient()); - qpSettings.analyzeWildcard(defaultAnalyzeWildcard); - qpSettings.allowLeadingWildcard(defaultAllowLeadingWildcard); - qpSettings.locale(Locale.ROOT); - String currentFieldName = null; XContentParser.Token token; + String queryString = null; + String defaultField = null; + String analyzer = null; + String quoteAnalyzer = null; + String queryName = null; + float boost = AbstractQueryBuilder.DEFAULT_BOOST; + boolean autoGeneratePhraseQueries = QueryStringQueryBuilder.DEFAULT_AUTO_GENERATE_PHRASE_QUERIES; + int maxDeterminizedStates = QueryStringQueryBuilder.DEFAULT_MAX_DETERMINED_STATES; + boolean lowercaseExpandedTerms = QueryStringQueryBuilder.DEFAULT_LOWERCASE_EXPANDED_TERMS; + boolean enablePositionIncrements = QueryStringQueryBuilder.DEFAULT_ENABLE_POSITION_INCREMENTS; + boolean escape = QueryStringQueryBuilder.DEFAULT_ESCAPE; + boolean useDisMax = QueryStringQueryBuilder.DEFAULT_USE_DIS_MAX; + int fuzzyPrefixLength = QueryStringQueryBuilder.DEFAULT_FUZZY_PREFIX_LENGTH; + int fuzzyMaxExpansions = QueryStringQueryBuilder.DEFAULT_FUZZY_MAX_EXPANSIONS; + int phraseSlop = QueryStringQueryBuilder.DEFAULT_PHRASE_SLOP; + float tieBreaker = QueryStringQueryBuilder.DEFAULT_TIE_BREAKER; + Boolean analyzeWildcard = null; + Boolean allowLeadingWildcard = null; + String minimumShouldMatch 
= null; + String quoteFieldSuffix = null; + Boolean lenient = null; + Operator defaultOperator = QueryStringQueryBuilder.DEFAULT_OPERATOR; + String timeZone = null; + Locale locale = QueryStringQueryBuilder.DEFAULT_LOCALE; + Fuzziness fuzziness = QueryStringQueryBuilder.DEFAULT_FUZZINESS; + String fuzzyRewrite = null; + String rewrite = null; + Map fieldsAndWeights = new HashMap<>(); while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); @@ -87,7 +82,7 @@ public class QueryStringQueryParser implements QueryParser { if ("fields".equals(currentFieldName)) { while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { String fField = null; - float fBoost = -1; + float fBoost = AbstractQueryBuilder.DEFAULT_BOOST; char[] text = parser.textCharacters(); int end = parser.textOffset() + parser.textLength(); for (int i = parser.textOffset(); i < end; i++) { @@ -101,147 +96,113 @@ public class QueryStringQueryParser implements QueryParser { if (fField == null) { fField = parser.text(); } - if (qpSettings.fields() == null) { - qpSettings.fields(new ArrayList()); - } - - if (Regex.isSimpleMatchPattern(fField)) { - for (String field : parseContext.mapperService().simpleMatchToIndexNames(fField)) { - qpSettings.fields().add(field); - if (fBoost != -1) { - if (qpSettings.boosts() == null) { - qpSettings.boosts(new ObjectFloatHashMap()); - } - qpSettings.boosts().put(field, fBoost); - } - } - } else { - qpSettings.fields().add(fField); - if (fBoost != -1) { - if (qpSettings.boosts() == null) { - qpSettings.boosts(new ObjectFloatHashMap()); - } - qpSettings.boosts().put(fField, fBoost); - } - } + fieldsAndWeights.put(fField, fBoost); } } else { - throw new ParsingException(parseContext, "[query_string] query does not support [" + currentFieldName - + "]"); + throw new ParsingException(parser.getTokenLocation(), "[query_string] query does not support [" + 
currentFieldName + "]"); } } else if (token.isValue()) { if ("query".equals(currentFieldName)) { - qpSettings.queryString(parser.text()); + queryString = parser.text(); } else if ("default_field".equals(currentFieldName) || "defaultField".equals(currentFieldName)) { - qpSettings.defaultField(parser.text()); + defaultField = parser.text(); } else if ("default_operator".equals(currentFieldName) || "defaultOperator".equals(currentFieldName)) { - String op = parser.text(); - if ("or".equalsIgnoreCase(op)) { - qpSettings.defaultOperator(org.apache.lucene.queryparser.classic.QueryParser.Operator.OR); - } else if ("and".equalsIgnoreCase(op)) { - qpSettings.defaultOperator(org.apache.lucene.queryparser.classic.QueryParser.Operator.AND); - } else { - throw new ParsingException(parseContext, "Query default operator [" + op + "] is not allowed"); - } + defaultOperator = Operator.fromString(parser.text()); } else if ("analyzer".equals(currentFieldName)) { - NamedAnalyzer analyzer = parseContext.analysisService().analyzer(parser.text()); - if (analyzer == null) { - throw new ParsingException(parseContext, "[query_string] analyzer [" + parser.text() + "] not found"); - } - qpSettings.forcedAnalyzer(analyzer); + analyzer = parser.text(); } else if ("quote_analyzer".equals(currentFieldName) || "quoteAnalyzer".equals(currentFieldName)) { - NamedAnalyzer analyzer = parseContext.analysisService().analyzer(parser.text()); - if (analyzer == null) { - throw new ParsingException(parseContext, "[query_string] quote_analyzer [" + parser.text() - + "] not found"); - } - qpSettings.forcedQuoteAnalyzer(analyzer); + quoteAnalyzer = parser.text(); } else if ("allow_leading_wildcard".equals(currentFieldName) || "allowLeadingWildcard".equals(currentFieldName)) { - qpSettings.allowLeadingWildcard(parser.booleanValue()); + allowLeadingWildcard = parser.booleanValue(); } else if ("auto_generate_phrase_queries".equals(currentFieldName) || "autoGeneratePhraseQueries".equals(currentFieldName)) { - 
qpSettings.autoGeneratePhraseQueries(parser.booleanValue()); + autoGeneratePhraseQueries = parser.booleanValue(); } else if ("max_determinized_states".equals(currentFieldName) || "maxDeterminizedStates".equals(currentFieldName)) { - qpSettings.maxDeterminizedStates(parser.intValue()); + maxDeterminizedStates = parser.intValue(); } else if ("lowercase_expanded_terms".equals(currentFieldName) || "lowercaseExpandedTerms".equals(currentFieldName)) { - qpSettings.lowercaseExpandedTerms(parser.booleanValue()); + lowercaseExpandedTerms = parser.booleanValue(); } else if ("enable_position_increments".equals(currentFieldName) || "enablePositionIncrements".equals(currentFieldName)) { - qpSettings.enablePositionIncrements(parser.booleanValue()); + enablePositionIncrements = parser.booleanValue(); } else if ("escape".equals(currentFieldName)) { - qpSettings.escape(parser.booleanValue()); + escape = parser.booleanValue(); } else if ("use_dis_max".equals(currentFieldName) || "useDisMax".equals(currentFieldName)) { - qpSettings.useDisMax(parser.booleanValue()); + useDisMax = parser.booleanValue(); } else if ("fuzzy_prefix_length".equals(currentFieldName) || "fuzzyPrefixLength".equals(currentFieldName)) { - qpSettings.fuzzyPrefixLength(parser.intValue()); + fuzzyPrefixLength = parser.intValue(); } else if ("fuzzy_max_expansions".equals(currentFieldName) || "fuzzyMaxExpansions".equals(currentFieldName)) { - qpSettings.fuzzyMaxExpansions(parser.intValue()); + fuzzyMaxExpansions = parser.intValue(); } else if ("fuzzy_rewrite".equals(currentFieldName) || "fuzzyRewrite".equals(currentFieldName)) { - qpSettings.fuzzyRewriteMethod(QueryParsers.parseRewriteMethod(parseContext.parseFieldMatcher(), parser.textOrNull())); + fuzzyRewrite = parser.textOrNull(); } else if ("phrase_slop".equals(currentFieldName) || "phraseSlop".equals(currentFieldName)) { - qpSettings.phraseSlop(parser.intValue()); + phraseSlop = parser.intValue(); } else if 
(parseContext.parseFieldMatcher().match(currentFieldName, FUZZINESS)) { - qpSettings.setFuzziness(Fuzziness.parse(parser)); + fuzziness = Fuzziness.parse(parser); } else if ("boost".equals(currentFieldName)) { - qpSettings.boost(parser.floatValue()); + boost = parser.floatValue(); } else if ("tie_breaker".equals(currentFieldName) || "tieBreaker".equals(currentFieldName)) { - qpSettings.tieBreaker(parser.floatValue()); + tieBreaker = parser.floatValue(); } else if ("analyze_wildcard".equals(currentFieldName) || "analyzeWildcard".equals(currentFieldName)) { - qpSettings.analyzeWildcard(parser.booleanValue()); + analyzeWildcard = parser.booleanValue(); } else if ("rewrite".equals(currentFieldName)) { - qpSettings.rewriteMethod(QueryParsers.parseRewriteMethod(parseContext.parseFieldMatcher(), parser.textOrNull())); + rewrite = parser.textOrNull(); } else if ("minimum_should_match".equals(currentFieldName) || "minimumShouldMatch".equals(currentFieldName)) { - qpSettings.minimumShouldMatch(parser.textOrNull()); + minimumShouldMatch = parser.textOrNull(); } else if ("quote_field_suffix".equals(currentFieldName) || "quoteFieldSuffix".equals(currentFieldName)) { - qpSettings.quoteFieldSuffix(parser.textOrNull()); + quoteFieldSuffix = parser.textOrNull(); } else if ("lenient".equalsIgnoreCase(currentFieldName)) { - qpSettings.lenient(parser.booleanValue()); + lenient = parser.booleanValue(); } else if ("locale".equals(currentFieldName)) { String localeStr = parser.text(); - qpSettings.locale(LocaleUtils.parse(localeStr)); - } else if ("time_zone".equals(currentFieldName)) { + locale = Locale.forLanguageTag(localeStr); + } else if ("time_zone".equals(currentFieldName) || "timeZone".equals(currentFieldName)) { try { - qpSettings.timeZone(DateTimeZone.forID(parser.text())); + timeZone = parser.text(); } catch (IllegalArgumentException e) { - throw new ParsingException(parseContext, - "[query_string] time_zone [" + parser.text() + "] is unknown"); + throw new 
ParsingException(parser.getTokenLocation(), "[query_string] time_zone [" + parser.text() + "] is unknown"); } } else if ("_name".equals(currentFieldName)) { queryName = parser.text(); } else { - throw new ParsingException(parseContext, "[query_string] query does not support [" + currentFieldName - + "]"); + throw new ParsingException(parser.getTokenLocation(), "[query_string] query does not support [" + currentFieldName + "]"); } } } - if (qpSettings.queryString() == null) { - throw new ParsingException(parseContext, "query_string must be provided with a [query]"); - } - qpSettings.defaultAnalyzer(parseContext.mapperService().searchAnalyzer()); - qpSettings.defaultQuoteAnalyzer(parseContext.mapperService().searchQuoteAnalyzer()); - - if (qpSettings.escape()) { - qpSettings.queryString(org.apache.lucene.queryparser.classic.QueryParser.escape(qpSettings.queryString())); + if (queryString == null) { + throw new ParsingException(parser.getTokenLocation(), "query_string must be provided with a [query]"); } - MapperQueryParser queryParser = parseContext.queryParser(qpSettings); + QueryStringQueryBuilder queryStringQuery = new QueryStringQueryBuilder(queryString); + queryStringQuery.fields(fieldsAndWeights); + queryStringQuery.defaultField(defaultField); + queryStringQuery.defaultOperator(defaultOperator); + queryStringQuery.analyzer(analyzer); + queryStringQuery.quoteAnalyzer(quoteAnalyzer); + queryStringQuery.allowLeadingWildcard(allowLeadingWildcard); + queryStringQuery.autoGeneratePhraseQueries(autoGeneratePhraseQueries); + queryStringQuery.maxDeterminizedStates(maxDeterminizedStates); + queryStringQuery.lowercaseExpandedTerms(lowercaseExpandedTerms); + queryStringQuery.enablePositionIncrements(enablePositionIncrements); + queryStringQuery.escape(escape); + queryStringQuery.useDisMax(useDisMax); + queryStringQuery.fuzzyPrefixLength(fuzzyPrefixLength); + queryStringQuery.fuzzyMaxExpansions(fuzzyMaxExpansions); + queryStringQuery.fuzzyRewrite(fuzzyRewrite); + 
queryStringQuery.phraseSlop(phraseSlop); + queryStringQuery.fuzziness(fuzziness); + queryStringQuery.tieBreaker(tieBreaker); + queryStringQuery.analyzeWildcard(analyzeWildcard); + queryStringQuery.rewrite(rewrite); + queryStringQuery.minimumShouldMatch(minimumShouldMatch); + queryStringQuery.quoteFieldSuffix(quoteFieldSuffix); + queryStringQuery.lenient(lenient); + queryStringQuery.timeZone(timeZone); + queryStringQuery.locale(locale); + queryStringQuery.boost(boost); + queryStringQuery.queryName(queryName); + return queryStringQuery; + } - try { - Query query = queryParser.parse(qpSettings.queryString()); - if (query == null) { - return null; - } - if (qpSettings.boost() != QueryParserSettings.DEFAULT_BOOST) { - query.setBoost(query.getBoost() * qpSettings.boost()); - } - query = fixNegativeQueryIfNeeded(query); - if (query instanceof BooleanQuery) { - query = Queries.applyMinimumShouldMatch((BooleanQuery) query, qpSettings.minimumShouldMatch()); - } - if (queryName != null) { - parseContext.addNamedQuery(queryName, query); - } - return query; - } catch (org.apache.lucene.queryparser.classic.ParseException e) { - throw new ParsingException(parseContext, "Failed to parse query [" + qpSettings.queryString() + "]", e); - } + @Override + public QueryStringQueryBuilder getBuilderPrototype() { + return QueryStringQueryBuilder.PROTOTYPE; } } diff --git a/core/src/main/java/org/elasticsearch/index/query/QueryValidationException.java b/core/src/main/java/org/elasticsearch/index/query/QueryValidationException.java new file mode 100644 index 00000000000..9e0ee2a1c33 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/index/query/QueryValidationException.java @@ -0,0 +1,62 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.query; + +import org.elasticsearch.common.ValidationException; + +import java.util.List; + +/** + * This exception can be used to indicate various reasons why validation of a query has failed. + */ +public class QueryValidationException extends ValidationException { + + /** + * Helper method than can be used to add error messages to an existing {@link QueryValidationException}. + * When passing {@code null} as the initial exception, a new exception is created. + * + * @param queryId the query that caused the error + * @param validationError the error message to add to an initial exception + * @param validationException an initial exception. Can be {@code null}, in which case a new exception is created. + * @return a {@link QueryValidationException} with added validation error message + */ + public static QueryValidationException addValidationError(String queryId, String validationError, QueryValidationException validationException) { + if (validationException == null) { + validationException = new QueryValidationException(); + } + validationException.addValidationError("[" + queryId + "] " + validationError); + return validationException; + } + + /** + * Helper method than can be used to add error messages to an existing {@link QueryValidationException}. + * When passing {@code null} as the initial exception, a new exception is created. 
+ * @param validationErrors the error messages to add to an initial exception + * @param validationException an initial exception. Can be {@code null}, in which case a new exception is created. + * @return a {@link QueryValidationException} with added validation error message + */ + public static QueryValidationException addValidationErrors(List validationErrors, QueryValidationException validationException) { + if (validationException == null) { + validationException = new QueryValidationException(); + } + validationException.addValidationErrors(validationErrors); + return validationException; + } +} diff --git a/core/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java index da23698a3be..1c8b57c3879 100644 --- a/core/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java @@ -19,187 +19,116 @@ package org.elasticsearch.index.query; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.TermRangeQuery; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.joda.DateMathParser; +import org.elasticsearch.common.joda.FormatDateTimeFormatter; +import org.elasticsearch.common.joda.Joda; +import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.core.DateFieldMapper; +import org.joda.time.DateTimeZone; import java.io.IOException; +import java.util.Objects; /** * A Query that matches documents within an range of terms. 
*/ -public class RangeQueryBuilder extends MultiTermQueryBuilder implements BoostableQueryBuilder { +public class RangeQueryBuilder extends AbstractQueryBuilder implements MultiTermQueryBuilder { + + public static final boolean DEFAULT_INCLUDE_UPPER = true; + + public static final boolean DEFAULT_INCLUDE_LOWER = true; + + public static final String NAME = "range"; + + private final String fieldName; - private final String name; private Object from; + private Object to; - private String timeZone; - private boolean includeLower = true; - private boolean includeUpper = true; - private float boost = -1; - private String queryName; - private String format; + + private DateTimeZone timeZone; + + private boolean includeLower = DEFAULT_INCLUDE_LOWER; + + private boolean includeUpper = DEFAULT_INCLUDE_UPPER; + + private FormatDateTimeFormatter format; + + static final RangeQueryBuilder PROTOTYPE = new RangeQueryBuilder("field"); /** * A Query that matches documents within an range of terms. * - * @param name The field name + * @param fieldName The field name */ - public RangeQueryBuilder(String name) { - this.name = name; + public RangeQueryBuilder(String fieldName) { + if (Strings.isEmpty(fieldName)) { + throw new IllegalArgumentException("field name is null or empty"); + } + this.fieldName = fieldName; + } + + /** + * Get the field name for this query. + */ + public String fieldName() { + return this.fieldName; + } + + /** + * The from part of the range query. Null indicates unbounded. + * In case lower bound is assigned to a string, we internally convert it to a {@link BytesRef} because + * in {@link RangeQueryParser} field are later parsed as {@link BytesRef} and we need internal representation + * of query to be equal regardless of whether it was created from XContent or via Java API. 
+ */ + public RangeQueryBuilder from(Object from, boolean includeLower) { + this.from = convertToBytesRefIfString(from); + this.includeLower = includeLower; + return this; } /** * The from part of the range query. Null indicates unbounded. */ public RangeQueryBuilder from(Object from) { - this.from = from; - return this; + return from(from, this.includeLower); } /** - * The from part of the range query. Null indicates unbounded. + * Gets the lower range value for this query. */ - public RangeQueryBuilder from(String from) { - this.from = from; - return this; - } - - /** - * The from part of the range query. Null indicates unbounded. - */ - public RangeQueryBuilder from(int from) { - this.from = from; - return this; - } - - /** - * The from part of the range query. Null indicates unbounded. - */ - public RangeQueryBuilder from(long from) { - this.from = from; - return this; - } - - /** - * The from part of the range query. Null indicates unbounded. - */ - public RangeQueryBuilder from(float from) { - this.from = from; - return this; - } - - /** - * The from part of the range query. Null indicates unbounded. - */ - public RangeQueryBuilder from(double from) { - this.from = from; - return this; - } - - /** - * The from part of the range query. Null indicates unbounded. - */ - public RangeQueryBuilder gt(String from) { - this.from = from; - this.includeLower = false; - return this; + public Object from() { + return convertToStringIfBytesRef(this.from); } /** * The from part of the range query. Null indicates unbounded. */ public RangeQueryBuilder gt(Object from) { - this.from = from; - this.includeLower = false; - return this; - } - - /** - * The from part of the range query. Null indicates unbounded. - */ - public RangeQueryBuilder gt(int from) { - this.from = from; - this.includeLower = false; - return this; - } - - /** - * The from part of the range query. Null indicates unbounded. 
- */ - public RangeQueryBuilder gt(long from) { - this.from = from; - this.includeLower = false; - return this; - } - - /** - * The from part of the range query. Null indicates unbounded. - */ - public RangeQueryBuilder gt(float from) { - this.from = from; - this.includeLower = false; - return this; - } - - /** - * The from part of the range query. Null indicates unbounded. - */ - public RangeQueryBuilder gt(double from) { - this.from = from; - this.includeLower = false; - return this; - } - - /** - * The from part of the range query. Null indicates unbounded. - */ - public RangeQueryBuilder gte(String from) { - this.from = from; - this.includeLower = true; - return this; + return from(from, false); } /** * The from part of the range query. Null indicates unbounded. */ public RangeQueryBuilder gte(Object from) { - this.from = from; - this.includeLower = true; - return this; + return from(from, true); } /** - * The from part of the range query. Null indicates unbounded. + * The to part of the range query. Null indicates unbounded. */ - public RangeQueryBuilder gte(int from) { - this.from = from; - this.includeLower = true; - return this; - } - - /** - * The from part of the range query. Null indicates unbounded. - */ - public RangeQueryBuilder gte(long from) { - this.from = from; - this.includeLower = true; - return this; - } - - /** - * The from part of the range query. Null indicates unbounded. - */ - public RangeQueryBuilder gte(float from) { - this.from = from; - this.includeLower = true; - return this; - } - - /** - * The from part of the range query. Null indicates unbounded. 
- */ - public RangeQueryBuilder gte(double from) { - this.from = from; - this.includeLower = true; + public RangeQueryBuilder to(Object to, boolean includeUpper) { + this.to = convertToBytesRefIfString(to); + this.includeUpper = includeUpper; return this; } @@ -207,156 +136,31 @@ public class RangeQueryBuilder extends MultiTermQueryBuilder implements Boostabl * The to part of the range query. Null indicates unbounded. */ public RangeQueryBuilder to(Object to) { - this.to = to; - return this; + return to(to, this.includeUpper); } /** - * The to part of the range query. Null indicates unbounded. + * Gets the upper range value for this query. + * In case upper bound is assigned to a string, we internally convert it to a {@link BytesRef} because + * in {@link RangeQueryParser} field are later parsed as {@link BytesRef} and we need internal representation + * of query to be equal regardless of whether it was created from XContent or via Java API. */ - public RangeQueryBuilder to(String to) { - this.to = to; - return this; - } - - /** - * The to part of the range query. Null indicates unbounded. - */ - public RangeQueryBuilder to(int to) { - this.to = to; - return this; - } - - /** - * The to part of the range query. Null indicates unbounded. - */ - public RangeQueryBuilder to(long to) { - this.to = to; - return this; - } - - /** - * The to part of the range query. Null indicates unbounded. - */ - public RangeQueryBuilder to(float to) { - this.to = to; - return this; - } - - /** - * The to part of the range query. Null indicates unbounded. - */ - public RangeQueryBuilder to(double to) { - this.to = to; - return this; - } - - /** - * The to part of the range query. Null indicates unbounded. - */ - public RangeQueryBuilder lt(String to) { - this.to = to; - this.includeUpper = false; - return this; + public Object to() { + return convertToStringIfBytesRef(this.to); } /** * The to part of the range query. Null indicates unbounded. 
*/ public RangeQueryBuilder lt(Object to) { - this.to = to; - this.includeUpper = false; - return this; - } - - /** - * The to part of the range query. Null indicates unbounded. - */ - public RangeQueryBuilder lt(int to) { - this.to = to; - this.includeUpper = false; - return this; - } - - /** - * The to part of the range query. Null indicates unbounded. - */ - public RangeQueryBuilder lt(long to) { - this.to = to; - this.includeUpper = false; - return this; - } - - /** - * The to part of the range query. Null indicates unbounded. - */ - public RangeQueryBuilder lt(float to) { - this.to = to; - this.includeUpper = false; - return this; - } - - /** - * The to part of the range query. Null indicates unbounded. - */ - public RangeQueryBuilder lt(double to) { - this.to = to; - this.includeUpper = false; - return this; - } - - /** - * The to part of the range query. Null indicates unbounded. - */ - public RangeQueryBuilder lte(String to) { - this.to = to; - this.includeUpper = true; - return this; + return to(to, false); } /** * The to part of the range query. Null indicates unbounded. */ public RangeQueryBuilder lte(Object to) { - this.to = to; - this.includeUpper = true; - return this; - } - - /** - * The to part of the range query. Null indicates unbounded. - */ - public RangeQueryBuilder lte(int to) { - this.to = to; - this.includeUpper = true; - return this; - } - - /** - * The to part of the range query. Null indicates unbounded. - */ - public RangeQueryBuilder lte(long to) { - this.to = to; - this.includeUpper = true; - return this; - } - - /** - * The to part of the range query. Null indicates unbounded. - */ - public RangeQueryBuilder lte(float to) { - this.to = to; - this.includeUpper = true; - return this; - } - - /** - * The to part of the range query. Null indicates unbounded. 
- */ - public RangeQueryBuilder lte(double to) { - this.to = to; - this.includeUpper = true; - return this; + return to(to, true); } /** @@ -367,6 +171,13 @@ public class RangeQueryBuilder extends MultiTermQueryBuilder implements Boostabl return this; } + /** + * Gets the includeLower flag for this query. + */ + public boolean includeLower() { + return this.includeLower; + } + /** * Should the upper bound be included or not. Defaults to true. */ @@ -376,60 +187,158 @@ public class RangeQueryBuilder extends MultiTermQueryBuilder implements Boostabl } /** - * Sets the boost for this query. Documents matching this query will (in addition to the normal - * weightings) have their score multiplied by the boost provided. + * Gets the includeUpper flag for this query. */ - @Override - public RangeQueryBuilder boost(float boost) { - this.boost = boost; - return this; - } - - /** - * Sets the query name for the filter that can be used when searching for matched_filters per hit. - */ - public RangeQueryBuilder queryName(String queryName) { - this.queryName = queryName; - return this; + public boolean includeUpper() { + return this.includeUpper; } /** * In case of date field, we can adjust the from/to fields using a timezone */ - public RangeQueryBuilder timeZone(String timezone) { - this.timeZone = timezone; + public RangeQueryBuilder timeZone(String timeZone) { + if (timeZone == null) { + throw new IllegalArgumentException("timezone cannot be null"); + } + this.timeZone = DateTimeZone.forID(timeZone); return this; } /** - * In case of date field, we can set the format to be used instead of the mapper format + * In case of date field, gets the from/to fields timezone adjustment + */ + public String timeZone() { + return this.timeZone == null ? 
null : this.timeZone.getID(); + } + + /** + * In case of format field, we can parse the from/to fields using this time format */ public RangeQueryBuilder format(String format) { - this.format = format; + if (format == null) { + throw new IllegalArgumentException("format cannot be null"); + } + this.format = Joda.forPattern(format); return this; } + /** + * Gets the format field to parse the from/to fields + */ + public String format() { + return this.format == null ? null : this.format.format(); + } + @Override protected void doXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(RangeQueryParser.NAME); - builder.startObject(name); - builder.field("from", from); - builder.field("to", to); - if (timeZone != null) { - builder.field("time_zone", timeZone); - } - if (format != null) { - builder.field("format", format); - } + builder.startObject(NAME); + builder.startObject(fieldName); + builder.field("from", convertToStringIfBytesRef(this.from)); + builder.field("to", convertToStringIfBytesRef(this.to)); builder.field("include_lower", includeLower); builder.field("include_upper", includeUpper); - if (boost != -1) { - builder.field("boost", boost); + if (timeZone != null) { + builder.field("time_zone", timeZone.getID()); } + if (format != null) { + builder.field("format", format.format()); + } + printBoostAndQueryName(builder); builder.endObject(); - if (queryName != null) { - builder.field("_name", queryName); - } builder.endObject(); } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + protected Query doToQuery(QueryShardContext context) throws IOException { + Query query = null; + MappedFieldType mapper = context.fieldMapper(this.fieldName); + if (mapper != null) { + if (mapper instanceof DateFieldMapper.DateFieldType) { + DateMathParser forcedDateParser = null; + if (this.format != null) { + forcedDateParser = new DateMathParser(this.format); + } + query = ((DateFieldMapper.DateFieldType) 
mapper).rangeQuery(from, to, includeLower, includeUpper, timeZone, forcedDateParser); + } else { + if (timeZone != null) { + throw new QueryShardException(context, "[range] time_zone can not be applied to non date field [" + + fieldName + "]"); + } + //LUCENE 4 UPGRADE Mapper#rangeQuery should use bytesref as well? + query = mapper.rangeQuery(from, to, includeLower, includeUpper); + } + } else { + if (timeZone != null) { + throw new QueryShardException(context, "[range] time_zone can not be applied to non unmapped field [" + + fieldName + "]"); + } + } + + if (query == null) { + query = new TermRangeQuery(this.fieldName, BytesRefs.toBytesRef(from), BytesRefs.toBytesRef(to), includeLower, includeUpper); + } + return query; + } + + @Override + protected RangeQueryBuilder doReadFrom(StreamInput in) throws IOException { + RangeQueryBuilder rangeQueryBuilder = new RangeQueryBuilder(in.readString()); + rangeQueryBuilder.from = in.readGenericValue(); + rangeQueryBuilder.to = in.readGenericValue(); + rangeQueryBuilder.includeLower = in.readBoolean(); + rangeQueryBuilder.includeUpper = in.readBoolean(); + String timeZoneId = in.readOptionalString(); + if (timeZoneId != null) { + rangeQueryBuilder.timeZone = DateTimeZone.forID(timeZoneId); + } + String formatString = in.readOptionalString(); + if (formatString != null) { + rangeQueryBuilder.format = Joda.forPattern(formatString); + } + return rangeQueryBuilder; + } + + @Override + protected void doWriteTo(StreamOutput out) throws IOException { + out.writeString(this.fieldName); + out.writeGenericValue(this.from); + out.writeGenericValue(this.to); + out.writeBoolean(this.includeLower); + out.writeBoolean(this.includeUpper); + String timeZoneId = null; + if (this.timeZone != null) { + timeZoneId = this.timeZone.getID(); + } + out.writeOptionalString(timeZoneId); + String formatString = null; + if (this.format != null) { + formatString = this.format.format(); + } + out.writeOptionalString(formatString); + } + + @Override + 
protected int doHashCode() { + String timeZoneId = timeZone == null ? null : timeZone.getID(); + String formatString = format == null ? null : format.format(); + return Objects.hash(fieldName, from, to, timeZoneId, includeLower, includeUpper, formatString); + } + + @Override + protected boolean doEquals(RangeQueryBuilder other) { + String timeZoneId = timeZone == null ? null : timeZone.getID(); + String formatString = format == null ? null : format.format(); + return Objects.equals(fieldName, other.fieldName) && + Objects.equals(from, other.from) && + Objects.equals(to, other.to) && + Objects.equals(timeZoneId, other.timeZone()) && + Objects.equals(includeLower, other.includeLower) && + Objects.equals(includeUpper, other.includeUpper) && + Objects.equals(formatString, other.format()); + } } diff --git a/core/src/main/java/org/elasticsearch/index/query/RangeQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/RangeQueryParser.java index ca37f5678fa..dcd07b3e4eb 100644 --- a/core/src/main/java/org/elasticsearch/index/query/RangeQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/RangeQueryParser.java @@ -19,52 +19,38 @@ package org.elasticsearch.index.query; -import org.apache.lucene.search.Query; -import org.apache.lucene.search.TermRangeQuery; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.joda.DateMathParser; -import org.elasticsearch.common.joda.Joda; -import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.mapper.core.DateFieldMapper; -import org.joda.time.DateTimeZone; import java.io.IOException; /** - * + * Parser for range query */ -public class RangeQueryParser implements QueryParser { +public class RangeQueryParser implements QueryParser { - public static final 
String NAME = "range"; private static final ParseField FIELDDATA_FIELD = new ParseField("fielddata").withAllDeprecated("[no replacement]"); private static final ParseField NAME_FIELD = new ParseField("_name").withAllDeprecated("query name is not supported in short version of range query"); - @Inject - public RangeQueryParser() { - } - @Override public String[] names() { - return new String[]{NAME}; + return new String[]{RangeQueryBuilder.NAME}; } @Override - public Query parse(QueryParseContext parseContext) throws IOException, ParsingException { + public RangeQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException { XContentParser parser = parseContext.parser(); String fieldName = null; Object from = null; Object to = null; - boolean includeLower = true; - boolean includeUpper = true; - DateTimeZone timeZone = null; - DateMathParser forcedDateParser = null; - float boost = 1.0f; + boolean includeLower = RangeQueryBuilder.DEFAULT_INCLUDE_LOWER; + boolean includeUpper = RangeQueryBuilder.DEFAULT_INCLUDE_UPPER; + String timeZone = null; + float boost = AbstractQueryBuilder.DEFAULT_BOOST; String queryName = null; + String format = null; String currentFieldName = null; XContentParser.Token token; @@ -102,11 +88,13 @@ public class RangeQueryParser implements QueryParser { to = parser.objectBytes(); includeUpper = true; } else if ("time_zone".equals(currentFieldName) || "timeZone".equals(currentFieldName)) { - timeZone = DateTimeZone.forID(parser.text()); + timeZone = parser.text(); } else if ("format".equals(currentFieldName)) { - forcedDateParser = new DateMathParser(Joda.forPattern(parser.text())); + format = parser.text(); + } else if ("_name".equals(currentFieldName)) { + queryName = parser.text(); } else { - throw new ParsingException(parseContext, "[range] query does not support [" + currentFieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), "[range] query does not support [" + currentFieldName + "]"); } } } @@ -116,32 
+104,29 @@ public class RangeQueryParser implements QueryParser { } else if (parseContext.parseFieldMatcher().match(currentFieldName, FIELDDATA_FIELD)) { // ignore } else { - throw new ParsingException(parseContext, "[range] query does not support [" + currentFieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), "[range] query does not support [" + currentFieldName + "]"); } } } - Query query = null; - MappedFieldType mapper = parseContext.fieldMapper(fieldName); - if (mapper != null) { - if (mapper instanceof DateFieldMapper.DateFieldType) { - query = ((DateFieldMapper.DateFieldType) mapper).rangeQuery(from, to, includeLower, includeUpper, timeZone, forcedDateParser); - } else { - if (timeZone != null) { - throw new ParsingException(parseContext, "[range] time_zone can not be applied to non date field [" - + fieldName + "]"); - } - //LUCENE 4 UPGRADE Mapper#rangeQuery should use bytesref as well? - query = mapper.rangeQuery(from, to, includeLower, includeUpper); - } + RangeQueryBuilder rangeQuery = new RangeQueryBuilder(fieldName); + rangeQuery.from(from); + rangeQuery.to(to); + rangeQuery.includeLower(includeLower); + rangeQuery.includeUpper(includeUpper); + if (timeZone != null) { + rangeQuery.timeZone(timeZone); } - if (query == null) { - query = new TermRangeQuery(fieldName, BytesRefs.toBytesRef(from), BytesRefs.toBytesRef(to), includeLower, includeUpper); + rangeQuery.boost(boost); + rangeQuery.queryName(queryName); + if (format != null) { + rangeQuery.format(format); } - query.setBoost(boost); - if (queryName != null) { - parseContext.addNamedQuery(queryName, query); - } - return query; + return rangeQuery; + } + + @Override + public RangeQueryBuilder getBuilderPrototype() { + return RangeQueryBuilder.PROTOTYPE; } } diff --git a/core/src/main/java/org/elasticsearch/index/query/RegexpQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/RegexpQueryBuilder.java index ee143eba77c..f596bf84d5b 100644 --- 
a/core/src/main/java/org/elasticsearch/index/query/RegexpQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/RegexpQueryBuilder.java @@ -19,48 +19,79 @@ package org.elasticsearch.index.query; +import org.apache.lucene.index.Term; +import org.apache.lucene.search.MultiTermQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.RegexpQuery; import org.apache.lucene.util.automaton.Operations; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.query.support.QueryParsers; import java.io.IOException; +import java.util.Objects; /** * A Query that does fuzzy matching for a specific value. */ -public class RegexpQueryBuilder extends MultiTermQueryBuilder implements BoostableQueryBuilder { +public class RegexpQueryBuilder extends AbstractQueryBuilder implements MultiTermQueryBuilder { - private final String name; - private final String regexp; + public static final String NAME = "regexp"; + + public static final int DEFAULT_FLAGS_VALUE = RegexpFlag.ALL.value(); + + public static final int DEFAULT_MAX_DETERMINIZED_STATES = Operations.DEFAULT_MAX_DETERMINIZED_STATES; + + private final String fieldName; + + private final String value; + + private int flagsValue = DEFAULT_FLAGS_VALUE; + + private int maxDeterminizedStates = DEFAULT_MAX_DETERMINIZED_STATES; - private int flags = RegexpQueryParser.DEFAULT_FLAGS_VALUE; - private float boost = -1; private String rewrite; - private String queryName; - private int maxDeterminizedStates = Operations.DEFAULT_MAX_DETERMINIZED_STATES; - private boolean maxDetermizedStatesSet; + + static final RegexpQueryBuilder PROTOTYPE = new RegexpQueryBuilder("field", "value"); /** - * Constructs a new term 
query. + * Constructs a new regex query. * - * @param name The name of the field - * @param regexp The regular expression + * @param fieldName The name of the field + * @param value The regular expression */ - public RegexpQueryBuilder(String name, String regexp) { - this.name = name; - this.regexp = regexp; + public RegexpQueryBuilder(String fieldName, String value) { + if (Strings.isEmpty(fieldName)) { + throw new IllegalArgumentException("field name is null or empty"); + } + if (value == null) { + throw new IllegalArgumentException("value cannot be null."); + } + this.fieldName = fieldName; + this.value = value; + } + + /** Returns the field name used in this query. */ + public String fieldName() { + return this.fieldName; } /** - * Sets the boost for this query. Documents matching this query will (in addition to the normal - * weightings) have their score multiplied by the boost provided. + * Returns the value used in this query. */ - @Override - public RegexpQueryBuilder boost(float boost) { - this.boost = boost; - return this; + public String value() { + return this.value; } public RegexpQueryBuilder flags(RegexpFlag... flags) { + if (flags == null) { + this.flagsValue = DEFAULT_FLAGS_VALUE; + return this; + } int value = 0; if (flags.length == 0) { value = RegexpFlag.ALL.value; @@ -69,53 +100,108 @@ public class RegexpQueryBuilder extends MultiTermQueryBuilder implements Boostab value |= flag.value; } } - this.flags = value; + this.flagsValue = value; return this; } + public RegexpQueryBuilder flags(int flags) { + this.flagsValue = flags; + return this; + } + + public int flags() { + return this.flagsValue; + } + /** * Sets the regexp maxDeterminizedStates. 
*/ public RegexpQueryBuilder maxDeterminizedStates(int value) { this.maxDeterminizedStates = value; - this.maxDetermizedStatesSet = true; return this; } + public int maxDeterminizedStates() { + return this.maxDeterminizedStates; + } + public RegexpQueryBuilder rewrite(String rewrite) { this.rewrite = rewrite; return this; } - /** - * Sets the query name for the filter that can be used when searching for matched_filters per hit. - */ - public RegexpQueryBuilder queryName(String queryName) { - this.queryName = queryName; - return this; + public String rewrite() { + return this.rewrite; } @Override public void doXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(RegexpQueryParser.NAME); - builder.startObject(name); - builder.field("value", regexp); - if (flags != -1) { - builder.field("flags_value", flags); - } - if (maxDetermizedStatesSet) { - builder.field("max_determinized_states", maxDeterminizedStates); - } - if (boost != -1) { - builder.field("boost", boost); - } + builder.startObject(NAME); + builder.startObject(fieldName); + builder.field("value", this.value); + builder.field("flags_value", flagsValue); + builder.field("max_determinized_states", maxDeterminizedStates); if (rewrite != null) { builder.field("rewrite", rewrite); } - if (queryName != null) { - builder.field("_name", queryName); - } + printBoostAndQueryName(builder); builder.endObject(); builder.endObject(); } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public Query doToQuery(QueryShardContext context) throws QueryShardException, IOException { + MultiTermQuery.RewriteMethod method = QueryParsers.parseRewriteMethod(context.parseFieldMatcher(), rewrite, null); + + Query query = null; + MappedFieldType fieldType = context.fieldMapper(fieldName); + if (fieldType != null) { + query = fieldType.regexpQuery(value, flagsValue, maxDeterminizedStates, method, context); + } + if (query == null) { + RegexpQuery regexpQuery = new 
RegexpQuery(new Term(fieldName, BytesRefs.toBytesRef(value)), flagsValue, maxDeterminizedStates); + if (method != null) { + regexpQuery.setRewriteMethod(method); + } + query = regexpQuery; + } + return query; + } + + @Override + public RegexpQueryBuilder doReadFrom(StreamInput in) throws IOException { + RegexpQueryBuilder regexpQueryBuilder = new RegexpQueryBuilder(in.readString(), in.readString()); + regexpQueryBuilder.flagsValue = in.readVInt(); + regexpQueryBuilder.maxDeterminizedStates = in.readVInt(); + regexpQueryBuilder.rewrite = in.readOptionalString(); + return regexpQueryBuilder; + } + + @Override + public void doWriteTo(StreamOutput out) throws IOException { + out.writeString(fieldName); + out.writeString(value); + out.writeVInt(flagsValue); + out.writeVInt(maxDeterminizedStates); + out.writeOptionalString(rewrite); + } + + @Override + public int doHashCode() { + return Objects.hash(fieldName, value, flagsValue, maxDeterminizedStates, rewrite); + } + + @Override + public boolean doEquals(RegexpQueryBuilder other) { + return Objects.equals(fieldName, other.fieldName) && + Objects.equals(value, other.value) && + Objects.equals(flagsValue, other.flagsValue) && + Objects.equals(maxDeterminizedStates, other.maxDeterminizedStates) && + Objects.equals(rewrite, other.rewrite); + } } diff --git a/core/src/main/java/org/elasticsearch/index/query/RegexpQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/RegexpQueryParser.java index ab105ba762f..d07c23da171 100644 --- a/core/src/main/java/org/elasticsearch/index/query/RegexpQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/RegexpQueryParser.java @@ -19,52 +19,35 @@ package org.elasticsearch.index.query; -import org.apache.lucene.index.Term; -import org.apache.lucene.search.MultiTermQuery; -import org.apache.lucene.search.Query; -import org.apache.lucene.search.RegexpQuery; -import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.common.ParseField; import 
org.elasticsearch.common.ParsingException; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.query.support.QueryParsers; import java.io.IOException; /** - * + * Parser for regexp query */ -public class RegexpQueryParser implements QueryParser { - - public static final String NAME = "regexp"; - - public static final int DEFAULT_FLAGS_VALUE = RegexpFlag.ALL.value(); +public class RegexpQueryParser implements QueryParser { private static final ParseField NAME_FIELD = new ParseField("_name").withAllDeprecated("query name is not supported in short version of regexp query"); - @Inject - public RegexpQueryParser() { - } - @Override public String[] names() { - return new String[]{NAME}; + return new String[]{RegexpQueryBuilder.NAME}; } @Override - public Query parse(QueryParseContext parseContext) throws IOException, ParsingException { + public RegexpQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException { XContentParser parser = parseContext.parser(); String fieldName = parser.currentName(); - String rewriteMethod = null; + String rewrite = null; String value = null; - float boost = 1.0f; - int flagsValue = DEFAULT_FLAGS_VALUE; - int maxDeterminizedStates = Operations.DEFAULT_MAX_DETERMINIZED_STATES; + float boost = AbstractQueryBuilder.DEFAULT_BOOST; + int flagsValue = RegexpQueryBuilder.DEFAULT_FLAGS_VALUE; + int maxDeterminizedStates = RegexpQueryBuilder.DEFAULT_MAX_DETERMINIZED_STATES; String queryName = null; String currentFieldName = null; XContentParser.Token token; @@ -84,7 +67,7 @@ public class RegexpQueryParser implements QueryParser { } else if ("boost".equals(currentFieldName)) { boost = parser.floatValue(); } else if ("rewrite".equals(currentFieldName)) { - rewriteMethod = parser.textOrNull(); + rewrite = parser.textOrNull(); } else if 
("flags".equals(currentFieldName)) { String flags = parser.textOrNull(); flagsValue = RegexpFlag.resolveValue(flags); @@ -95,7 +78,7 @@ public class RegexpQueryParser implements QueryParser { } else if ("_name".equals(currentFieldName)) { queryName = parser.text(); } else { - throw new ParsingException(parseContext, "[regexp] query does not support [" + currentFieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), "[regexp] query does not support [" + currentFieldName + "]"); } } } @@ -110,29 +93,18 @@ public class RegexpQueryParser implements QueryParser { } if (value == null) { - throw new ParsingException(parseContext, "No value specified for regexp query"); + throw new ParsingException(parser.getTokenLocation(), "No value specified for regexp query"); } - - MultiTermQuery.RewriteMethod method = QueryParsers.parseRewriteMethod(parseContext.parseFieldMatcher(), rewriteMethod, null); - - Query query = null; - MappedFieldType fieldType = parseContext.fieldMapper(fieldName); - if (fieldType != null) { - query = fieldType.regexpQuery(value, flagsValue, maxDeterminizedStates, method, parseContext); - } - if (query == null) { - RegexpQuery regexpQuery = new RegexpQuery(new Term(fieldName, BytesRefs.toBytesRef(value)), flagsValue, maxDeterminizedStates); - if (method != null) { - regexpQuery.setRewriteMethod(method); - } - query = regexpQuery; - } - query.setBoost(boost); - if (queryName != null) { - parseContext.addNamedQuery(queryName, query); - } - return query; + return new RegexpQueryBuilder(fieldName, value) + .flags(flagsValue) + .maxDeterminizedStates(maxDeterminizedStates) + .rewrite(rewrite) + .boost(boost) + .queryName(queryName); } - + @Override + public RegexpQueryBuilder getBuilderPrototype() { + return RegexpQueryBuilder.PROTOTYPE; + } } diff --git a/core/src/main/java/org/elasticsearch/index/query/ScriptQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/ScriptQueryBuilder.java index a9a35ac96e2..45ab7454d6a 100644 --- 
a/core/src/main/java/org/elasticsearch/index/query/ScriptQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/ScriptQueryBuilder.java @@ -19,40 +19,149 @@ package org.elasticsearch.index.query; +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.RandomAccessWeight; +import org.apache.lucene.search.Weight; +import org.apache.lucene.util.Bits; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.script.Script; +import org.elasticsearch.script.*; import org.elasticsearch.script.Script.ScriptField; +import org.elasticsearch.search.lookup.SearchLookup; import java.io.IOException; -import java.util.HashMap; -import java.util.Map; +import java.util.Objects; -public class ScriptQueryBuilder extends QueryBuilder { +public class ScriptQueryBuilder extends AbstractQueryBuilder { - private Script script; + public static final String NAME = "script"; - private String queryName; + static final ScriptQueryBuilder PROTOTYPE = new ScriptQueryBuilder(new Script("")); + + private final Script script; public ScriptQueryBuilder(Script script) { + if (script == null) { + throw new IllegalArgumentException("script cannot be null"); + } this.script = script; } - /** - * Sets the filter name for the filter that can be used when searching for matched_filters per hit. 
- */ - public ScriptQueryBuilder queryName(String queryName) { - this.queryName = queryName; - return this; + public Script script() { + return this.script; + } + + @Override + public String getWriteableName() { + return NAME; } @Override protected void doXContent(XContentBuilder builder, Params builderParams) throws IOException { - - builder.startObject(ScriptQueryParser.NAME); + builder.startObject(NAME); builder.field(ScriptField.SCRIPT.getPreferredName(), script); - if (queryName != null) { - builder.field("_name", queryName); - } + printBoostAndQueryName(builder); builder.endObject(); } + + @Override + protected Query doToQuery(QueryShardContext context) throws IOException { + return new ScriptQuery(script, context.scriptService(), context.lookup()); + } + + static class ScriptQuery extends Query { + + private final Script script; + + private final SearchScript searchScript; + + public ScriptQuery(Script script, ScriptService scriptService, SearchLookup searchLookup) { + this.script = script; + this.searchScript = scriptService.search(searchLookup, script, ScriptContext.Standard.SEARCH); + } + + @Override + public String toString(String field) { + StringBuilder buffer = new StringBuilder(); + buffer.append("ScriptFilter("); + buffer.append(script); + buffer.append(")"); + return buffer.toString(); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (!super.equals(obj)) + return false; + ScriptQuery other = (ScriptQuery) obj; + return Objects.equals(script, other.script); + } + + @Override + public int hashCode() { + final int prime = 31; + int result = super.hashCode(); + result = prime * result + Objects.hashCode(script); + return result; + } + + @Override + public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException { + return new RandomAccessWeight(this) { + @Override + protected Bits getMatchingDocs(final LeafReaderContext context) throws IOException { + final LeafSearchScript 
leafScript = searchScript.getLeafSearchScript(context); + return new Bits() { + + @Override + public boolean get(int doc) { + leafScript.setDocument(doc); + Object val = leafScript.run(); + if (val == null) { + return false; + } + if (val instanceof Boolean) { + return (Boolean) val; + } + if (val instanceof Number) { + return ((Number) val).longValue() != 0; + } + throw new IllegalArgumentException("Can't handle type [" + val + "] in script filter"); + } + + @Override + public int length() { + return context.reader().maxDoc(); + } + + }; + } + }; + } + } + + @Override + protected ScriptQueryBuilder doReadFrom(StreamInput in) throws IOException { + return new ScriptQueryBuilder(Script.readScript(in)); + } + + @Override + protected void doWriteTo(StreamOutput out) throws IOException { + script.writeTo(out); + } + + @Override + protected int doHashCode() { + return Objects.hash(script); + } + + @Override + protected boolean doEquals(ScriptQueryBuilder other) { + return Objects.equals(script, other.script); + } } \ No newline at end of file diff --git a/core/src/main/java/org/elasticsearch/index/query/ScriptQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/ScriptQueryParser.java index 31af574ace3..97ad0a21873 100644 --- a/core/src/main/java/org/elasticsearch/index/query/ScriptQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/ScriptQueryParser.java @@ -19,60 +19,41 @@ package org.elasticsearch.index.query; -import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.search.IndexSearcher; -import org.apache.lucene.search.Query; -import org.apache.lucene.search.RandomAccessWeight; -import org.apache.lucene.search.Weight; -import org.apache.lucene.util.Bits; import org.elasticsearch.common.ParsingException; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.script.LeafSearchScript; import org.elasticsearch.script.Script; import 
org.elasticsearch.script.Script.ScriptField; -import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptParameterParser; import org.elasticsearch.script.ScriptParameterParser.ScriptParameterValue; -import org.elasticsearch.script.ScriptService; -import org.elasticsearch.script.SearchScript; -import org.elasticsearch.search.lookup.SearchLookup; import java.io.IOException; import java.util.HashMap; import java.util.Map; -import java.util.Objects; /** - * + * Parser for script query */ -public class ScriptQueryParser implements QueryParser { - - public static final String NAME = "script"; - - @Inject - public ScriptQueryParser() { - } +public class ScriptQueryParser implements QueryParser { @Override public String[] names() { - return new String[] { NAME }; + return new String[]{ScriptQueryBuilder.NAME}; } @Override - public Query parse(QueryParseContext parseContext) throws IOException, ParsingException { + public ScriptQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException { XContentParser parser = parseContext.parser(); ScriptParameterParser scriptParameterParser = new ScriptParameterParser(); - XContentParser.Token token; - // also, when caching, since its isCacheable is false, will result in loading all bit set... 
Script script = null; Map params = null; + float boost = AbstractQueryBuilder.DEFAULT_BOOST; String queryName = null; - String currentFieldName = null; + XContentParser.Token token; + String currentFieldName = null; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); @@ -84,13 +65,15 @@ public class ScriptQueryParser implements QueryParser { } else if ("params".equals(currentFieldName)) { // TODO remove in 3.0 (here to support old script APIs) params = parser.map(); } else { - throw new ParsingException(parseContext, "[script] query does not support [" + currentFieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), "[script] query does not support [" + currentFieldName + "]"); } } else if (token.isValue()) { if ("_name".equals(currentFieldName)) { queryName = parser.text(); + } else if ("boost".equals(currentFieldName)) { + boost = parser.floatValue(); } else if (!scriptParameterParser.token(currentFieldName, token, parser, parseContext.parseFieldMatcher())) { - throw new ParsingException(parseContext, "[script] query does not support [" + currentFieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), "[script] query does not support [" + currentFieldName + "]"); } } } @@ -104,90 +87,20 @@ public class ScriptQueryParser implements QueryParser { script = new Script(scriptValue.script(), scriptValue.scriptType(), scriptParameterParser.lang(), params); } } else if (params != null) { - throw new ParsingException(parseContext, "script params must be specified inside script object in a [script] filter"); + throw new ParsingException(parser.getTokenLocation(), "script params must be specified inside script object in a [script] filter"); } if (script == null) { - throw new ParsingException(parseContext, "script must be provided with a [script] filter"); + throw new ParsingException(parser.getTokenLocation(), "script must be 
provided with a [script] filter"); } - Query query = new ScriptQuery(script, parseContext.scriptService(), parseContext.lookup()); - if (queryName != null) { - parseContext.addNamedQuery(queryName, query); - } - return query; + return new ScriptQueryBuilder(script) + .boost(boost) + .queryName(queryName); } - static class ScriptQuery extends Query { - - private final Script script; - - private final SearchScript searchScript; - - public ScriptQuery(Script script, ScriptService scriptService, SearchLookup searchLookup) { - this.script = script; - this.searchScript = scriptService.search(searchLookup, script, ScriptContext.Standard.SEARCH); - } - - @Override - public String toString(String field) { - StringBuilder buffer = new StringBuilder(); - buffer.append("ScriptFilter("); - buffer.append(script); - buffer.append(")"); - return buffer.toString(); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) - return true; - if (!super.equals(obj)) - return false; - ScriptQuery other = (ScriptQuery) obj; - return Objects.equals(script, other.script); - } - - @Override - public int hashCode() { - final int prime = 31; - int result = super.hashCode(); - result = prime * result + Objects.hashCode(script); - return result; - } - - @Override - public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException { - return new RandomAccessWeight(this) { - @Override - protected Bits getMatchingDocs(final LeafReaderContext context) throws IOException { - final LeafSearchScript leafScript = searchScript.getLeafSearchScript(context); - return new Bits() { - - @Override - public boolean get(int doc) { - leafScript.setDocument(doc); - Object val = leafScript.run(); - if (val == null) { - return false; - } - if (val instanceof Boolean) { - return (Boolean) val; - } - if (val instanceof Number) { - return ((Number) val).longValue() != 0; - } - throw new IllegalArgumentException("Can't handle type [" + val + "] in script filter"); - } - - 
@Override - public int length() { - return context.reader().maxDoc(); - } - - }; - } - }; - } + @Override + public ScriptQueryBuilder getBuilderPrototype() { + return ScriptQueryBuilder.PROTOTYPE; } } diff --git a/core/src/main/java/org/elasticsearch/index/query/SimpleQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/SimpleQueryParser.java index 9ae0703282f..f8b0deaf9be 100644 --- a/core/src/main/java/org/elasticsearch/index/query/SimpleQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/SimpleQueryParser.java @@ -29,6 +29,7 @@ import org.apache.lucene.util.BytesRef; import java.io.IOException; import java.util.Locale; import java.util.Map; +import java.util.Objects; /** * Wrapper class for Lucene's SimpleQueryParser that allows us to redefine @@ -201,51 +202,102 @@ public class SimpleQueryParser extends org.apache.lucene.queryparser.simple.Simp return new PrefixQuery(new Term(field, termStr)); } } - /** * Class encapsulating the settings for the SimpleQueryString query, with * their default values */ - public static class Settings { - private Locale locale = Locale.ROOT; - private boolean lowercaseExpandedTerms = true; - private boolean lenient = false; - private boolean analyzeWildcard = false; + static class Settings { + /** Locale to use for parsing. */ + private Locale locale = SimpleQueryStringBuilder.DEFAULT_LOCALE; + /** Specifies whether parsed terms should be lowercased. */ + private boolean lowercaseExpandedTerms = SimpleQueryStringBuilder.DEFAULT_LOWERCASE_EXPANDED_TERMS; + /** Specifies whether lenient query parsing should be used. */ + private boolean lenient = SimpleQueryStringBuilder.DEFAULT_LENIENT; + /** Specifies whether wildcards should be analyzed. */ + private boolean analyzeWildcard = SimpleQueryStringBuilder.DEFAULT_ANALYZE_WILDCARD; + /** + * Generates default {@link Settings} object (uses ROOT locale, does + * lowercase terms, no lenient parsing, no wildcard analysis). 
+ * */ public Settings() { - } - public void locale(Locale locale) { + public Settings(Locale locale, Boolean lowercaseExpandedTerms, Boolean lenient, Boolean analyzeWildcard) { this.locale = locale; + this.lowercaseExpandedTerms = lowercaseExpandedTerms; + this.lenient = lenient; + this.analyzeWildcard = analyzeWildcard; } + /** Specifies the locale to use for parsing, Locale.ROOT by default. */ + public void locale(Locale locale) { + this.locale = (locale != null) ? locale : SimpleQueryStringBuilder.DEFAULT_LOCALE; + } + + /** Returns the locale to use for parsing. */ public Locale locale() { return this.locale; } + /** + * Specifies whether to lowercase parse terms, defaults to true if + * unset. + */ public void lowercaseExpandedTerms(boolean lowercaseExpandedTerms) { this.lowercaseExpandedTerms = lowercaseExpandedTerms; } + /** Returns whether to lowercase parse terms. */ public boolean lowercaseExpandedTerms() { return this.lowercaseExpandedTerms; } + /** Specifies whether to use lenient parsing, defaults to false. */ public void lenient(boolean lenient) { this.lenient = lenient; } + /** Returns whether to use lenient parsing. */ public boolean lenient() { return this.lenient; } + /** Specifies whether to analyze wildcards. Defaults to false if unset. */ public void analyzeWildcard(boolean analyzeWildcard) { this.analyzeWildcard = analyzeWildcard; } + /** Returns whether to analyze wildcards. */ public boolean analyzeWildcard() { return analyzeWildcard; } + + @Override + public int hashCode() { + // checking the return value of toLanguageTag() for locales only. 
+ // For further reasoning see + // https://issues.apache.org/jira/browse/LUCENE-4021 + return Objects.hash(locale.toLanguageTag(), lowercaseExpandedTerms, lenient, analyzeWildcard); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + Settings other = (Settings) obj; + + // checking the return value of toLanguageTag() for locales only. + // For further reasoning see + // https://issues.apache.org/jira/browse/LUCENE-4021 + return (Objects.equals(locale.toLanguageTag(), other.locale.toLanguageTag()) + && Objects.equals(lowercaseExpandedTerms, other.lowercaseExpandedTerms) + && Objects.equals(lenient, other.lenient) + && Objects.equals(analyzeWildcard, other.analyzeWildcard)); + } } } diff --git a/core/src/main/java/org/elasticsearch/index/query/SimpleQueryStringBuilder.java b/core/src/main/java/org/elasticsearch/index/query/SimpleQueryStringBuilder.java index 700ad419e9f..3f8cc5d7e23 100644 --- a/core/src/main/java/org/elasticsearch/index/query/SimpleQueryStringBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/SimpleQueryStringBuilder.java @@ -19,202 +19,380 @@ package org.elasticsearch.index.query; -import org.elasticsearch.common.xcontent.ToXContent.Params; +import org.apache.lucene.analysis.Analyzer; +import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.Query; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.lucene.search.Queries; +import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.query.SimpleQueryParser.Settings; import java.io.IOException; import java.util.HashMap; import java.util.Locale; import java.util.Map; +import 
java.util.Objects; +import java.util.TreeMap; /** - * SimpleQuery is a query parser that acts similar to a query_string - * query, but won't throw exceptions for any weird string syntax. + * SimpleQuery is a query parser that acts similar to a query_string query, but + * won't throw exceptions for any weird string syntax. + * + * For more detailed explanation of the query string syntax see also the online documentation. */ -public class SimpleQueryStringBuilder extends QueryBuilder implements BoostableQueryBuilder { - private Map fields = new HashMap<>(); - private String analyzer; - private Operator operator; +public class SimpleQueryStringBuilder extends AbstractQueryBuilder { + /** Default locale used for parsing.*/ + public static final Locale DEFAULT_LOCALE = Locale.ROOT; + /** Default for lowercasing parsed terms.*/ + public static final boolean DEFAULT_LOWERCASE_EXPANDED_TERMS = true; + /** Default for using lenient query parsing.*/ + public static final boolean DEFAULT_LENIENT = false; + /** Default for wildcard analysis.*/ + public static final boolean DEFAULT_ANALYZE_WILDCARD = false; + /** Default for default operator to use for linking boolean clauses.*/ + public static final Operator DEFAULT_OPERATOR = Operator.OR; + /** Default for search flags to use. */ + public static final int DEFAULT_FLAGS = SimpleQueryStringFlag.ALL.value; + /** Name for (de-)serialization. */ + public static final String NAME = "simple_query_string"; + + static final SimpleQueryStringBuilder PROTOTYPE = new SimpleQueryStringBuilder(""); + + /** Query text to parse. */ private final String queryText; - private String queryName; + /** + * Fields to query against. If left empty will query default field, + * currently _ALL. Uses a TreeMap to hold the fields so boolean clauses are + * always sorted in same order for generated Lucene query for easier + * testing. + * + * Can be changed back to HashMap once https://issues.apache.org/jira/browse/LUCENE-6305 is fixed. 
+ */ + private final Map fieldsAndWeights = new TreeMap<>(); + /** If specified, analyzer to use to parse the query text, defaults to registered default in toQuery. */ + private String analyzer; + /** Default operator to use for linking boolean clauses. Defaults to OR according to docs. */ + private Operator defaultOperator = DEFAULT_OPERATOR; + /** If result is a boolean query, minimumShouldMatch parameter to apply. Ignored otherwise. */ private String minimumShouldMatch; - private int flags = -1; - private float boost = -1.0f; - private Boolean lowercaseExpandedTerms; - private Boolean lenient; - private Boolean analyzeWildcard; - private Locale locale; + /** Any search flags to be used, ALL by default. */ + private int flags = DEFAULT_FLAGS; - /** - * Operators for the default_operator - */ - public static enum Operator { - AND, - OR + /** Further search settings needed by the ES specific query string parser only. */ + private Settings settings = new Settings(); + + /** Construct a new simple query with this query string. */ + public SimpleQueryStringBuilder(String queryText) { + if (queryText == null) { + throw new IllegalArgumentException("query text missing"); + } + this.queryText = queryText; } - /** - * Construct a new simple query with the given text - */ - public SimpleQueryStringBuilder(String text) { - this.queryText = text; + /** Returns the text to parse the query from. */ + public String value() { + return this.queryText; } - /** Set the boost of this query. */ - @Override - public SimpleQueryStringBuilder boost(float boost) { - this.boost = boost; - return this; - } - - /** Returns the boost of this query. */ - public float boost() { - return this.boost; - } - - /** - * Add a field to run the query against - */ + /** Add a field to run the query against. 
*/ public SimpleQueryStringBuilder field(String field) { - this.fields.put(field, null); + if (Strings.isEmpty(field)) { + throw new IllegalArgumentException("supplied field is null or empty."); + } + this.fieldsAndWeights.put(field, AbstractQueryBuilder.DEFAULT_BOOST); return this; } - /** - * Add a field to run the query against with a specific boost - */ + /** Add a field to run the query against with a specific boost. */ public SimpleQueryStringBuilder field(String field, float boost) { - this.fields.put(field, boost); + if (Strings.isEmpty(field)) { + throw new IllegalArgumentException("supplied field is null or empty."); + } + this.fieldsAndWeights.put(field, boost); return this; } - /** - * Specify a name for the query - */ - public SimpleQueryStringBuilder queryName(String name) { - this.queryName = name; + /** Add several fields to run the query against with a specific boost. */ + public SimpleQueryStringBuilder fields(Map fields) { + this.fieldsAndWeights.putAll(fields); return this; } - /** - * Specify an analyzer to use for the query - */ + /** Returns the fields including their respective boosts to run the query against. */ + public Map fields() { + return this.fieldsAndWeights; + } + + /** Specify an analyzer to use for the query. */ public SimpleQueryStringBuilder analyzer(String analyzer) { this.analyzer = analyzer; return this; } - /** - * Specify the default operator for the query. Defaults to "OR" if no - * operator is specified - */ - public SimpleQueryStringBuilder defaultOperator(Operator defaultOperator) { - this.operator = defaultOperator; - return this; + /** Returns the analyzer to use for the query. */ + public String analyzer() { + return this.analyzer; } /** - * Specify the enabled features of the SimpleQueryString. + * Specify the default operator for the query. Defaults to "OR" if no + * operator is specified. 
+ */ + public SimpleQueryStringBuilder defaultOperator(Operator defaultOperator) { + this.defaultOperator = (defaultOperator != null) ? defaultOperator : DEFAULT_OPERATOR; + return this; + } + + /** Returns the default operator for the query. */ + public Operator defaultOperator() { + return this.defaultOperator; + } + + /** + * Specify the enabled features of the SimpleQueryString. Defaults to ALL if + * none are specified. */ public SimpleQueryStringBuilder flags(SimpleQueryStringFlag... flags) { - int value = 0; - if (flags.length == 0) { - value = SimpleQueryStringFlag.ALL.value; - } else { + if (flags != null && flags.length > 0) { + int value = 0; for (SimpleQueryStringFlag flag : flags) { value |= flag.value; } + this.flags = value; + } else { + this.flags = DEFAULT_FLAGS; } - this.flags = value; + return this; } + /** For testing and serialisation only. */ + SimpleQueryStringBuilder flags(int flags) { + this.flags = flags; + return this; + } + + /** For testing only: Return the flags set for this query. */ + int flags() { + return this.flags; + } + + /** + * Specifies whether parsed terms for this query should be lower-cased. + * Defaults to true if not set. + */ public SimpleQueryStringBuilder lowercaseExpandedTerms(boolean lowercaseExpandedTerms) { - this.lowercaseExpandedTerms = lowercaseExpandedTerms; + this.settings.lowercaseExpandedTerms(lowercaseExpandedTerms); return this; } + /** Returns whether parsed terms should be lower cased for this query. */ + public boolean lowercaseExpandedTerms() { + return this.settings.lowercaseExpandedTerms(); + } + + /** Specifies the locale for parsing terms. Defaults to ROOT if none is set. */ public SimpleQueryStringBuilder locale(Locale locale) { - this.locale = locale; + this.settings.locale(locale); return this; } + /** Returns the locale for parsing terms for this query. */ + public Locale locale() { + return this.settings.locale(); + } + + /** Specifies whether query parsing should be lenient. 
Defaults to false. */ public SimpleQueryStringBuilder lenient(boolean lenient) { - this.lenient = lenient; + this.settings.lenient(lenient); return this; } + /** Returns whether query parsing should be lenient. */ + public boolean lenient() { + return this.settings.lenient(); + } + + /** Specifies whether wildcards should be analyzed. Defaults to false. */ public SimpleQueryStringBuilder analyzeWildcard(boolean analyzeWildcard) { - this.analyzeWildcard = analyzeWildcard; + this.settings.analyzeWildcard(analyzeWildcard); return this; } + /** Returns whether wildcards should by analyzed. */ + public boolean analyzeWildcard() { + return this.settings.analyzeWildcard(); + } + + /** + * Specifies the minimumShouldMatch to apply to the resulting query should + * that be a Boolean query. + */ public SimpleQueryStringBuilder minimumShouldMatch(String minimumShouldMatch) { this.minimumShouldMatch = minimumShouldMatch; return this; } + /** + * Returns the minimumShouldMatch to apply to the resulting query should + * that be a Boolean query. 
+ */ + public String minimumShouldMatch() { + return minimumShouldMatch; + } + @Override - public void doXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(SimpleQueryStringParser.NAME); + protected Query doToQuery(QueryShardContext context) throws IOException { + // field names in builder can have wildcards etc, need to resolve them here + Map resolvedFieldsAndWeights = new TreeMap<>(); + // Use the default field if no fields specified + if (fieldsAndWeights.isEmpty()) { + resolvedFieldsAndWeights.put(resolveIndexName(context.defaultField(), context), AbstractQueryBuilder.DEFAULT_BOOST); + } else { + for (Map.Entry fieldEntry : fieldsAndWeights.entrySet()) { + if (Regex.isSimpleMatchPattern(fieldEntry.getKey())) { + for (String fieldName : context.mapperService().simpleMatchToIndexNames(fieldEntry.getKey())) { + resolvedFieldsAndWeights.put(fieldName, fieldEntry.getValue()); + } + } else { + resolvedFieldsAndWeights.put(resolveIndexName(fieldEntry.getKey(), context), fieldEntry.getValue()); + } + } + } + + // Use standard analyzer by default if none specified + Analyzer luceneAnalyzer; + if (analyzer == null) { + luceneAnalyzer = context.mapperService().searchAnalyzer(); + } else { + luceneAnalyzer = context.analysisService().analyzer(analyzer); + if (luceneAnalyzer == null) { + throw new QueryShardException(context, "[" + SimpleQueryStringBuilder.NAME + "] analyzer [" + analyzer + + "] not found"); + } + + } + + SimpleQueryParser sqp = new SimpleQueryParser(luceneAnalyzer, resolvedFieldsAndWeights, flags, settings); + sqp.setDefaultOperator(defaultOperator.toBooleanClauseOccur()); + + Query query = sqp.parse(queryText); + if (minimumShouldMatch != null && query instanceof BooleanQuery) { + query = Queries.applyMinimumShouldMatch((BooleanQuery) query, minimumShouldMatch); + } + return query; + } + + private static String resolveIndexName(String fieldName, QueryShardContext context) { + MappedFieldType fieldType = 
context.fieldMapper(fieldName); + if (fieldType != null) { + return fieldType.names().indexName(); + } + return fieldName; + } + + @Override + protected void setFinalBoost(Query query) { + query.setBoost(boost * query.getBoost()); + } + + @Override + protected void doXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(NAME); builder.field("query", queryText); - if (fields.size() > 0) { + if (fieldsAndWeights.size() > 0) { builder.startArray("fields"); - for (Map.Entry entry : fields.entrySet()) { - String field = entry.getKey(); - Float boost = entry.getValue(); - if (boost != null) { - builder.value(field + "^" + boost); - } else { - builder.value(field); - } + for (Map.Entry entry : fieldsAndWeights.entrySet()) { + builder.value(entry.getKey() + "^" + entry.getValue()); } builder.endArray(); } - if (flags != -1) { - builder.field("flags", flags); - } - if (analyzer != null) { builder.field("analyzer", analyzer); } - if (operator != null) { - builder.field("default_operator", operator.name().toLowerCase(Locale.ROOT)); - } - - if (lowercaseExpandedTerms != null) { - builder.field("lowercase_expanded_terms", lowercaseExpandedTerms); - } - - if (lenient != null) { - builder.field("lenient", lenient); - } - - if (analyzeWildcard != null) { - builder.field("analyze_wildcard", analyzeWildcard); - } - - if (locale != null) { - builder.field("locale", locale.toString()); - } - - if (queryName != null) { - builder.field("_name", queryName); - } + builder.field("flags", flags); + builder.field("default_operator", defaultOperator.name().toLowerCase(Locale.ROOT)); + builder.field("lowercase_expanded_terms", settings.lowercaseExpandedTerms()); + builder.field("lenient", settings.lenient()); + builder.field("analyze_wildcard", settings.analyzeWildcard()); + builder.field("locale", (settings.locale().toLanguageTag())); if (minimumShouldMatch != null) { builder.field("minimum_should_match", minimumShouldMatch); } - - if (boost != -1.0f) { - 
builder.field("boost", boost); - } + printBoostAndQueryName(builder); builder.endObject(); } + @Override + public String getWriteableName() { + return NAME; + } + + @Override + protected SimpleQueryStringBuilder doReadFrom(StreamInput in) throws IOException { + SimpleQueryStringBuilder result = new SimpleQueryStringBuilder(in.readString()); + int size = in.readInt(); + Map fields = new HashMap<>(); + for (int i = 0; i < size; i++) { + String field = in.readString(); + Float weight = in.readFloat(); + fields.put(field, weight); + } + result.fieldsAndWeights.putAll(fields); + result.flags = in.readInt(); + result.analyzer = in.readOptionalString(); + result.defaultOperator = Operator.readOperatorFrom(in); + result.settings.lowercaseExpandedTerms(in.readBoolean()); + result.settings.lenient(in.readBoolean()); + result.settings.analyzeWildcard(in.readBoolean()); + String localeStr = in.readString(); + result.settings.locale(Locale.forLanguageTag(localeStr)); + result.minimumShouldMatch = in.readOptionalString(); + return result; + } + + @Override + protected void doWriteTo(StreamOutput out) throws IOException { + out.writeString(queryText); + out.writeInt(fieldsAndWeights.size()); + for (Map.Entry entry : fieldsAndWeights.entrySet()) { + out.writeString(entry.getKey()); + out.writeFloat(entry.getValue()); + } + out.writeInt(flags); + out.writeOptionalString(analyzer); + defaultOperator.writeTo(out); + out.writeBoolean(settings.lowercaseExpandedTerms()); + out.writeBoolean(settings.lenient()); + out.writeBoolean(settings.analyzeWildcard()); + out.writeString(settings.locale().toLanguageTag()); + out.writeOptionalString(minimumShouldMatch); + } + + @Override + protected int doHashCode() { + return Objects.hash(fieldsAndWeights, analyzer, defaultOperator, queryText, minimumShouldMatch, settings, flags); + } + + @Override + protected boolean doEquals(SimpleQueryStringBuilder other) { + return Objects.equals(fieldsAndWeights, other.fieldsAndWeights) && 
Objects.equals(analyzer, other.analyzer) + && Objects.equals(defaultOperator, other.defaultOperator) && Objects.equals(queryText, other.queryText) + && Objects.equals(minimumShouldMatch, other.minimumShouldMatch) + && Objects.equals(settings, other.settings) && (flags == other.flags); + } } + diff --git a/core/src/main/java/org/elasticsearch/index/query/SimpleQueryStringFlag.java b/core/src/main/java/org/elasticsearch/index/query/SimpleQueryStringFlag.java index ce0ce88a9e4..68d19db7cc6 100644 --- a/core/src/main/java/org/elasticsearch/index/query/SimpleQueryStringFlag.java +++ b/core/src/main/java/org/elasticsearch/index/query/SimpleQueryStringFlag.java @@ -71,7 +71,7 @@ public enum SimpleQueryStringFlag { magic |= flag.value(); } } catch (IllegalArgumentException iae) { - throw new IllegalArgumentException("Unknown " + SimpleQueryStringParser.NAME + " flag [" + s + "]"); + throw new IllegalArgumentException("Unknown " + SimpleQueryStringBuilder.NAME + " flag [" + s + "]"); } } return magic; diff --git a/core/src/main/java/org/elasticsearch/index/query/SimpleQueryStringParser.java b/core/src/main/java/org/elasticsearch/index/query/SimpleQueryStringParser.java index 4207f93fa7d..c803d440055 100644 --- a/core/src/main/java/org/elasticsearch/index/query/SimpleQueryStringParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/SimpleQueryStringParser.java @@ -19,21 +19,11 @@ package org.elasticsearch.index.query; -import org.apache.lucene.analysis.Analyzer; -import org.apache.lucene.search.BooleanClause; -import org.apache.lucene.search.BooleanQuery; -import org.apache.lucene.search.Query; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.lucene.search.Queries; -import org.elasticsearch.common.regex.Regex; -import org.elasticsearch.common.util.LocaleUtils; import org.elasticsearch.common.xcontent.XContentParser; -import 
org.elasticsearch.index.mapper.MappedFieldType; import java.io.IOException; -import java.util.Collections; import java.util.HashMap; import java.util.Locale; import java.util.Map; @@ -68,34 +58,30 @@ import java.util.Map; * {@code fields} - fields to search, defaults to _all if not set, allows * boosting a field with ^n */ -public class SimpleQueryStringParser implements QueryParser { - - public static final String NAME = "simple_query_string"; - - @Inject - public SimpleQueryStringParser() { - - } +public class SimpleQueryStringParser implements QueryParser { @Override public String[] names() { - return new String[]{NAME, Strings.toCamelCase(NAME)}; + return new String[]{SimpleQueryStringBuilder.NAME, Strings.toCamelCase(SimpleQueryStringBuilder.NAME)}; } @Override - public Query parse(QueryParseContext parseContext) throws IOException, ParsingException { + public SimpleQueryStringBuilder fromXContent(QueryParseContext parseContext) throws IOException { XContentParser parser = parseContext.parser(); String currentFieldName = null; String queryBody = null; - float boost = 1.0f; + float boost = AbstractQueryBuilder.DEFAULT_BOOST; String queryName = null; String minimumShouldMatch = null; - Map fieldsAndWeights = null; - BooleanClause.Occur defaultOperator = null; - Analyzer analyzer = null; - int flags = -1; - SimpleQueryParser.Settings sqsSettings = new SimpleQueryParser.Settings(); + Map fieldsAndWeights = new HashMap<>(); + Operator defaultOperator = null; + String analyzerName = null; + int flags = SimpleQueryStringFlag.ALL.value(); + boolean lenient = SimpleQueryStringBuilder.DEFAULT_LENIENT; + boolean lowercaseExpandedTerms = SimpleQueryStringBuilder.DEFAULT_LOWERCASE_EXPANDED_TERMS; + boolean analyzeWildcard = SimpleQueryStringBuilder.DEFAULT_ANALYZE_WILDCARD; + Locale locale = null; XContentParser.Token token; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { @@ -119,26 +105,10 @@ public class SimpleQueryStringParser implements 
QueryParser { if (fField == null) { fField = parser.text(); } - - if (fieldsAndWeights == null) { - fieldsAndWeights = new HashMap<>(); - } - - if (Regex.isSimpleMatchPattern(fField)) { - for (String fieldName : parseContext.mapperService().simpleMatchToIndexNames(fField)) { - fieldsAndWeights.put(fieldName, fBoost); - } - } else { - MappedFieldType fieldType = parseContext.fieldMapper(fField); - if (fieldType != null) { - fieldsAndWeights.put(fieldType.names().indexName(), fBoost); - } else { - fieldsAndWeights.put(fField, fBoost); - } - } + fieldsAndWeights.put(fField, fBoost); } } else { - throw new ParsingException(parseContext, "[" + NAME + "] query does not support [" + currentFieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), "[" + SimpleQueryStringBuilder.NAME + "] query does not support [" + currentFieldName + "]"); } } else if (token.isValue()) { if ("query".equals(currentFieldName)) { @@ -146,19 +116,9 @@ public class SimpleQueryStringParser implements QueryParser { } else if ("boost".equals(currentFieldName)) { boost = parser.floatValue(); } else if ("analyzer".equals(currentFieldName)) { - analyzer = parseContext.analysisService().analyzer(parser.text()); - if (analyzer == null) { - throw new ParsingException(parseContext, "[" + NAME + "] analyzer [" + parser.text() + "] not found"); - } + analyzerName = parser.text(); } else if ("default_operator".equals(currentFieldName) || "defaultOperator".equals(currentFieldName)) { - String op = parser.text(); - if ("or".equalsIgnoreCase(op)) { - defaultOperator = BooleanClause.Occur.SHOULD; - } else if ("and".equalsIgnoreCase(op)) { - defaultOperator = BooleanClause.Occur.MUST; - } else { - throw new ParsingException(parseContext, "[" + NAME + "] default operator [" + op + "] is not allowed"); - } + defaultOperator = Operator.fromString(parser.text()); } else if ("flags".equals(currentFieldName)) { if (parser.currentToken() != XContentParser.Token.VALUE_NUMBER) { // Possible options are: 
@@ -172,56 +132,37 @@ public class SimpleQueryStringParser implements QueryParser { } } else if ("locale".equals(currentFieldName)) { String localeStr = parser.text(); - Locale locale = LocaleUtils.parse(localeStr); - sqsSettings.locale(locale); + locale = Locale.forLanguageTag(localeStr); } else if ("lowercase_expanded_terms".equals(currentFieldName)) { - sqsSettings.lowercaseExpandedTerms(parser.booleanValue()); + lowercaseExpandedTerms = parser.booleanValue(); } else if ("lenient".equals(currentFieldName)) { - sqsSettings.lenient(parser.booleanValue()); + lenient = parser.booleanValue(); } else if ("analyze_wildcard".equals(currentFieldName)) { - sqsSettings.analyzeWildcard(parser.booleanValue()); + analyzeWildcard = parser.booleanValue(); } else if ("_name".equals(currentFieldName)) { queryName = parser.text(); } else if ("minimum_should_match".equals(currentFieldName)) { minimumShouldMatch = parser.textOrNull(); } else { - throw new ParsingException(parseContext, "[" + NAME + "] unsupported field [" + parser.currentName() + "]"); + throw new ParsingException(parser.getTokenLocation(), "[" + SimpleQueryStringBuilder.NAME + "] unsupported field [" + parser.currentName() + "]"); } } } // Query text is required if (queryBody == null) { - throw new ParsingException(parseContext, "[" + NAME + "] query text missing"); + throw new ParsingException(parser.getTokenLocation(), "[" + SimpleQueryStringBuilder.NAME + "] query text missing"); } - // Use standard analyzer by default - if (analyzer == null) { - analyzer = parseContext.mapperService().searchAnalyzer(); - } + SimpleQueryStringBuilder qb = new SimpleQueryStringBuilder(queryBody); + qb.boost(boost).fields(fieldsAndWeights).analyzer(analyzerName).queryName(queryName).minimumShouldMatch(minimumShouldMatch); + qb.flags(flags).defaultOperator(defaultOperator).locale(locale).lowercaseExpandedTerms(lowercaseExpandedTerms); + qb.lenient(lenient).analyzeWildcard(analyzeWildcard).boost(boost); + return qb; + } - if 
(fieldsAndWeights == null) { - fieldsAndWeights = Collections.singletonMap(parseContext.defaultField(), 1.0F); - } - SimpleQueryParser sqp = new SimpleQueryParser(analyzer, fieldsAndWeights, flags, sqsSettings); - - if (defaultOperator != null) { - sqp.setDefaultOperator(defaultOperator); - } - - Query query = sqp.parse(queryBody); - if (queryName != null) { - parseContext.addNamedQuery(queryName, query); - } - - if (minimumShouldMatch != null && query instanceof BooleanQuery) { - query = Queries.applyMinimumShouldMatch((BooleanQuery) query, minimumShouldMatch); - } - - if (query != null) { - query.setBoost(boost * query.getBoost()); - } - - return query; + @Override + public SimpleQueryStringBuilder getBuilderPrototype() { + return SimpleQueryStringBuilder.PROTOTYPE; } } diff --git a/core/src/main/java/org/elasticsearch/index/query/SpanContainingQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/SpanContainingQueryBuilder.java index 0b7a3cd92c9..485ffff9f90 100644 --- a/core/src/main/java/org/elasticsearch/index/query/SpanContainingQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/SpanContainingQueryBuilder.java @@ -19,74 +19,101 @@ package org.elasticsearch.index.query; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.spans.SpanContainingQuery; +import org.apache.lucene.search.spans.SpanQuery; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import java.io.IOException; +import java.util.Objects; /** * Builder for {@link org.apache.lucene.search.spans.SpanContainingQuery}. 
*/ -public class SpanContainingQueryBuilder extends SpanQueryBuilder implements BoostableQueryBuilder { +public class SpanContainingQueryBuilder extends AbstractQueryBuilder implements SpanQueryBuilder { - private SpanQueryBuilder big; - private SpanQueryBuilder little; - private float boost = -1; - private String queryName; + public static final String NAME = "span_containing"; + private final SpanQueryBuilder big; + private final SpanQueryBuilder little; + static final SpanContainingQueryBuilder PROTOTYPE = new SpanContainingQueryBuilder(SpanTermQueryBuilder.PROTOTYPE, SpanTermQueryBuilder.PROTOTYPE); - /** - * Sets the little clause, it must be contained within {@code big} for a match. + /** + * @param big the big clause, it must enclose {@code little} for a match. + * @param little the little clause, it must be contained within {@code big} for a match. */ - public SpanContainingQueryBuilder little(SpanQueryBuilder clause) { - this.little = clause; - return this; - } - - /** - * Sets the big clause, it must enclose {@code little} for a match. - */ - public SpanContainingQueryBuilder big(SpanQueryBuilder clause) { - this.big = clause; - return this; - } - - @Override - public SpanContainingQueryBuilder boost(float boost) { - this.boost = boost; - return this; + public SpanContainingQueryBuilder(SpanQueryBuilder big, SpanQueryBuilder little) { + if (big == null) { + throw new IllegalArgumentException("inner clause [big] cannot be null."); + } + if (little == null) { + throw new IllegalArgumentException("inner clause [little] cannot be null."); + } + this.little = little; + this.big = big; } /** - * Sets the query name for the filter that can be used when searching for matched_filters per hit. + * @return the big clause, it must enclose {@code little} for a match. 
*/ - public SpanContainingQueryBuilder queryName(String queryName) { - this.queryName = queryName; - return this; + public SpanQueryBuilder bigQuery() { + return this.big; + } + + /** + * @return the little clause, it must be contained within {@code big} for a match. + */ + public SpanQueryBuilder littleQuery() { + return this.little; } @Override protected void doXContent(XContentBuilder builder, Params params) throws IOException { - if (big == null) { - throw new IllegalArgumentException("Must specify big clause when building a span_containing query"); - } - if (little == null) { - throw new IllegalArgumentException("Must specify little clause when building a span_containing query"); - } - builder.startObject(SpanContainingQueryParser.NAME); - + builder.startObject(NAME); builder.field("big"); big.toXContent(builder, params); - builder.field("little"); little.toXContent(builder, params); - - if (boost != -1) { - builder.field("boost", boost); - } - - if (queryName != null) { - builder.field("_name", queryName); - } - + printBoostAndQueryName(builder); builder.endObject(); } + + @Override + protected Query doToQuery(QueryShardContext context) throws IOException { + Query innerBig = big.toQuery(context); + assert innerBig instanceof SpanQuery; + Query innerLittle = little.toQuery(context); + assert innerLittle instanceof SpanQuery; + return new SpanContainingQuery((SpanQuery) innerBig, (SpanQuery) innerLittle); + } + + @Override + protected SpanContainingQueryBuilder doReadFrom(StreamInput in) throws IOException { + SpanQueryBuilder big = (SpanQueryBuilder)in.readQuery(); + SpanQueryBuilder little = (SpanQueryBuilder)in.readQuery(); + return new SpanContainingQueryBuilder(big, little); + } + + @Override + protected void doWriteTo(StreamOutput out) throws IOException { + out.writeQuery(big); + out.writeQuery(little); + } + + @Override + protected int doHashCode() { + return Objects.hash(big, little); + } + + @Override + protected boolean 
doEquals(SpanContainingQueryBuilder other) { + return Objects.equals(big, other.big) && + Objects.equals(little, other.little); + } + + @Override + public String getWriteableName() { + return NAME; + } } diff --git a/core/src/main/java/org/elasticsearch/index/query/SpanContainingQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/SpanContainingQueryParser.java index c172748fbd3..51dddac19e4 100644 --- a/core/src/main/java/org/elasticsearch/index/query/SpanContainingQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/SpanContainingQueryParser.java @@ -19,40 +19,29 @@ package org.elasticsearch.index.query; -import org.apache.lucene.search.Query; -import org.apache.lucene.search.spans.SpanContainingQuery; -import org.apache.lucene.search.spans.SpanQuery; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; /** - * Parser for {@link SpanContainingQuery} + * Parser for span_containing query */ -public class SpanContainingQueryParser implements QueryParser { - - public static final String NAME = "span_containing"; - - @Inject - public SpanContainingQueryParser() { - } +public class SpanContainingQueryParser implements QueryParser { @Override public String[] names() { - return new String[]{NAME, Strings.toCamelCase(NAME)}; + return new String[]{SpanContainingQueryBuilder.NAME, Strings.toCamelCase(SpanContainingQueryBuilder.NAME)}; } @Override - public Query parse(QueryParseContext parseContext) throws IOException, ParsingException { + public SpanContainingQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException { XContentParser parser = parseContext.parser(); - - float boost = 1.0f; + float boost = AbstractQueryBuilder.DEFAULT_BOOST; String queryName = null; - SpanQuery big = null; - SpanQuery little = null; + SpanQueryBuilder big = null; + 
SpanQueryBuilder little = null; String currentFieldName = null; XContentParser.Token token; @@ -61,43 +50,36 @@ public class SpanContainingQueryParser implements QueryParser { currentFieldName = parser.currentName(); } else if (token == XContentParser.Token.START_OBJECT) { if ("big".equals(currentFieldName)) { - Query query = parseContext.parseInnerQuery(); - if (!(query instanceof SpanQuery)) { - throw new ParsingException(parseContext, "span_containing [big] must be of type span query"); + QueryBuilder query = parseContext.parseInnerQueryBuilder(); + if (!(query instanceof SpanQueryBuilder)) { + throw new ParsingException(parser.getTokenLocation(), "span_containing [big] must be of type span query"); } - big = (SpanQuery) query; + big = (SpanQueryBuilder) query; } else if ("little".equals(currentFieldName)) { - Query query = parseContext.parseInnerQuery(); - if (!(query instanceof SpanQuery)) { - throw new ParsingException(parseContext, "span_containing [little] must be of type span query"); + QueryBuilder query = parseContext.parseInnerQueryBuilder(); + if (!(query instanceof SpanQueryBuilder)) { + throw new ParsingException(parser.getTokenLocation(), "span_containing [little] must be of type span query"); } - little = (SpanQuery) query; + little = (SpanQueryBuilder) query; } else { - throw new ParsingException(parseContext, "[span_containing] query does not support [" + currentFieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), "[span_containing] query does not support [" + currentFieldName + "]"); } } else if ("boost".equals(currentFieldName)) { boost = parser.floatValue(); } else if ("_name".equals(currentFieldName)) { queryName = parser.text(); } else { - throw new ParsingException(parseContext, "[span_containing] query does not support [" + currentFieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), "[span_containing] query does not support [" + currentFieldName + "]"); } - } - - if (big == null) { - throw new 
ParsingException(parseContext, "span_containing must include [big]"); - } - if (little == null) { - throw new ParsingException(parseContext, "span_containing must include [little]"); } - Query query = new SpanContainingQuery(big, little); - if (boost != 1.0F) { - query.setBoost(boost); - } - if (queryName != null) { - parseContext.addNamedQuery(queryName, query); - } + SpanContainingQueryBuilder query = new SpanContainingQueryBuilder(big, little); + query.boost(boost).queryName(queryName); return query; } + + @Override + public SpanContainingQueryBuilder getBuilderPrototype() { + return SpanContainingQueryBuilder.PROTOTYPE; + } } diff --git a/core/src/main/java/org/elasticsearch/index/query/SpanFirstQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/SpanFirstQueryBuilder.java index f967a1c1c07..b4dcd3006eb 100644 --- a/core/src/main/java/org/elasticsearch/index/query/SpanFirstQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/SpanFirstQueryBuilder.java @@ -19,51 +19,101 @@ package org.elasticsearch.index.query; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.spans.SpanFirstQuery; +import org.apache.lucene.search.spans.SpanQuery; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import java.io.IOException; +import java.util.Objects; -public class SpanFirstQueryBuilder extends SpanQueryBuilder implements BoostableQueryBuilder { +public class SpanFirstQueryBuilder extends AbstractQueryBuilder implements SpanQueryBuilder{ + + public static final String NAME = "span_first"; private final SpanQueryBuilder matchBuilder; private final int end; - private float boost = -1; - - private String queryName; + static final SpanFirstQueryBuilder PROTOTYPE = new SpanFirstQueryBuilder(SpanTermQueryBuilder.PROTOTYPE, 0); + /** + * Query that matches spans queries defined in matchBuilder + * whose end 
position is less than or equal to end. + * @param matchBuilder inner {@link SpanQueryBuilder} + * @param end maximum end position of the match, needs to be positive + * @throws IllegalArgumentException for negative end positions + */ public SpanFirstQueryBuilder(SpanQueryBuilder matchBuilder, int end) { + if (matchBuilder == null) { + throw new IllegalArgumentException("inner span query cannot be null"); + } + if (end < 0) { + throw new IllegalArgumentException("parameter [end] needs to be positive."); + } this.matchBuilder = matchBuilder; this.end = end; } - @Override - public SpanFirstQueryBuilder boost(float boost) { - this.boost = boost; - return this; + /** + * @return the inner {@link SpanQueryBuilder} defined in this query + */ + public SpanQueryBuilder innerQuery() { + return this.matchBuilder; } /** - * Sets the query name for the filter that can be used when searching for matched_filters per hit. + * @return maximum end position of the matching inner span query */ - public SpanFirstQueryBuilder queryName(String queryName) { - this.queryName = queryName; - return this; + public int end() { + return this.end; } @Override protected void doXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(SpanFirstQueryParser.NAME); + builder.startObject(NAME); builder.field("match"); matchBuilder.toXContent(builder, params); builder.field("end", end); - if (boost != -1) { - builder.field("boost", boost); - } - if (queryName != null) { - builder.field("_name", queryName); - } + printBoostAndQueryName(builder); builder.endObject(); } + + @Override + protected Query doToQuery(QueryShardContext context) throws IOException { + Query innerSpanQuery = matchBuilder.toQuery(context); + assert innerSpanQuery instanceof SpanQuery; + return new SpanFirstQuery((SpanQuery) innerSpanQuery, end); + } + + @Override + protected SpanFirstQueryBuilder doReadFrom(StreamInput in) throws IOException { + SpanQueryBuilder matchBuilder = 
(SpanQueryBuilder)in.readQuery(); + int end = in.readInt(); + return new SpanFirstQueryBuilder(matchBuilder, end); + } + + @Override + protected void doWriteTo(StreamOutput out) throws IOException { + out.writeQuery(matchBuilder); + out.writeInt(end); + } + + @Override + protected int doHashCode() { + return Objects.hash(matchBuilder, end); + } + + @Override + protected boolean doEquals(SpanFirstQueryBuilder other) { + return Objects.equals(matchBuilder, other.matchBuilder) && + Objects.equals(end, other.end); + } + + @Override + public String getWriteableName() { + return NAME; + } } diff --git a/core/src/main/java/org/elasticsearch/index/query/SpanFirstQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/SpanFirstQueryParser.java index 5d4693dad50..e417b45bed3 100644 --- a/core/src/main/java/org/elasticsearch/index/query/SpanFirstQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/SpanFirstQueryParser.java @@ -19,40 +19,30 @@ package org.elasticsearch.index.query; -import org.apache.lucene.search.Query; -import org.apache.lucene.search.spans.SpanFirstQuery; -import org.apache.lucene.search.spans.SpanQuery; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; /** - * + * Parser for span_first query */ -public class SpanFirstQueryParser implements QueryParser { - - public static final String NAME = "span_first"; - - @Inject - public SpanFirstQueryParser() { - } +public class SpanFirstQueryParser implements QueryParser { @Override public String[] names() { - return new String[]{NAME, Strings.toCamelCase(NAME)}; + return new String[]{SpanFirstQueryBuilder.NAME, Strings.toCamelCase(SpanFirstQueryBuilder.NAME)}; } @Override - public Query parse(QueryParseContext parseContext) throws IOException, ParsingException { + public SpanFirstQueryBuilder 
fromXContent(QueryParseContext parseContext) throws IOException { XContentParser parser = parseContext.parser(); - float boost = 1.0f; + float boost = AbstractQueryBuilder.DEFAULT_BOOST; - SpanQuery match = null; - int end = -1; + SpanQueryBuilder match = null; + Integer end = null; String queryName = null; String currentFieldName = null; @@ -62,13 +52,13 @@ public class SpanFirstQueryParser implements QueryParser { currentFieldName = parser.currentName(); } else if (token == XContentParser.Token.START_OBJECT) { if ("match".equals(currentFieldName)) { - Query query = parseContext.parseInnerQuery(); - if (!(query instanceof SpanQuery)) { - throw new ParsingException(parseContext, "spanFirst [match] must be of type span query"); + QueryBuilder query = parseContext.parseInnerQueryBuilder(); + if (!(query instanceof SpanQueryBuilder)) { + throw new ParsingException(parser.getTokenLocation(), "spanFirst [match] must be of type span query"); } - match = (SpanQuery) query; + match = (SpanQueryBuilder) query; } else { - throw new ParsingException(parseContext, "[span_first] query does not support [" + currentFieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), "[span_first] query does not support [" + currentFieldName + "]"); } } else { if ("boost".equals(currentFieldName)) { @@ -78,22 +68,23 @@ public class SpanFirstQueryParser implements QueryParser { } else if ("_name".equals(currentFieldName)) { queryName = parser.text(); } else { - throw new ParsingException(parseContext, "[span_first] query does not support [" + currentFieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), "[span_first] query does not support [" + currentFieldName + "]"); } } } if (match == null) { - throw new ParsingException(parseContext, "spanFirst must have [match] span query clause"); + throw new ParsingException(parser.getTokenLocation(), "spanFirst must have [match] span query clause"); } - if (end == -1) { - throw new ParsingException(parseContext, 
"spanFirst must have [end] set for it"); + if (end == null) { + throw new ParsingException(parser.getTokenLocation(), "spanFirst must have [end] set for it"); } - - SpanFirstQuery query = new SpanFirstQuery(match, end); - query.setBoost(boost); - if (queryName != null) { - parseContext.addNamedQuery(queryName, query); - } - return query; + SpanFirstQueryBuilder queryBuilder = new SpanFirstQueryBuilder(match, end); + queryBuilder.boost(boost).queryName(queryName); + return queryBuilder; } -} \ No newline at end of file + + @Override + public SpanFirstQueryBuilder getBuilderPrototype() { + return SpanFirstQueryBuilder.PROTOTYPE; + } +} diff --git a/core/src/main/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilder.java index 11b98972c51..eac2e6a0a7a 100644 --- a/core/src/main/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilder.java @@ -18,25 +18,80 @@ */ package org.elasticsearch.index.query; +import org.apache.lucene.search.MultiTermQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.spans.SpanMultiTermQueryWrapper; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import java.io.IOException; +import java.util.Objects; -public class SpanMultiTermQueryBuilder extends SpanQueryBuilder { +/** + * Query that allows wraping a {@link MultiTermQueryBuilder} (one of wildcard, fuzzy, prefix, term, range or regexp query) + * as a {@link SpanQueryBuilder} so it can be nested. 
+ */ +public class SpanMultiTermQueryBuilder extends AbstractQueryBuilder implements SpanQueryBuilder { - private MultiTermQueryBuilder multiTermQueryBuilder; + public static final String NAME = "span_multi"; + private final MultiTermQueryBuilder multiTermQueryBuilder; + static final SpanMultiTermQueryBuilder PROTOTYPE = new SpanMultiTermQueryBuilder(RangeQueryBuilder.PROTOTYPE); public SpanMultiTermQueryBuilder(MultiTermQueryBuilder multiTermQueryBuilder) { + if (multiTermQueryBuilder == null) { + throw new IllegalArgumentException("inner multi term query cannot be null"); + } this.multiTermQueryBuilder = multiTermQueryBuilder; } + public MultiTermQueryBuilder innerQuery() { + return this.multiTermQueryBuilder; + } + @Override protected void doXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(SpanMultiTermQueryParser.NAME); + builder.startObject(NAME); builder.field(SpanMultiTermQueryParser.MATCH_NAME); multiTermQueryBuilder.toXContent(builder, params); + printBoostAndQueryName(builder); builder.endObject(); } + @Override + protected Query doToQuery(QueryShardContext context) throws IOException { + Query subQuery = multiTermQueryBuilder.toQuery(context); + if (subQuery instanceof MultiTermQuery == false) { + throw new UnsupportedOperationException("unsupported inner query, should be " + MultiTermQuery.class.getName() +" but was " + + subQuery.getClass().getName()); + } + return new SpanMultiTermQueryWrapper<>((MultiTermQuery) subQuery); + } + + @Override + protected SpanMultiTermQueryBuilder doReadFrom(StreamInput in) throws IOException { + MultiTermQueryBuilder multiTermBuilder = (MultiTermQueryBuilder)in.readQuery(); + return new SpanMultiTermQueryBuilder(multiTermBuilder); + } + + @Override + protected void doWriteTo(StreamOutput out) throws IOException { + out.writeQuery(multiTermQueryBuilder); + } + + @Override + protected int doHashCode() { + return Objects.hash(multiTermQueryBuilder); + } + + @Override + protected 
boolean doEquals(SpanMultiTermQueryBuilder other) { + return Objects.equals(multiTermQueryBuilder, other.multiTermQueryBuilder); + } + + @Override + public String getWriteableName() { + return NAME; + } } diff --git a/core/src/main/java/org/elasticsearch/index/query/SpanMultiTermQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/SpanMultiTermQueryParser.java index bb043aa3733..e51b693187c 100644 --- a/core/src/main/java/org/elasticsearch/index/query/SpanMultiTermQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/SpanMultiTermQueryParser.java @@ -18,54 +18,65 @@ */ package org.elasticsearch.index.query; -import org.apache.lucene.search.MultiTermQuery; -import org.apache.lucene.search.Query; -import org.apache.lucene.search.spans.SpanMultiTermQueryWrapper; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.xcontent.XContentParser.Token; import java.io.IOException; /** - * + * Parser for span_multi query */ -public class SpanMultiTermQueryParser implements QueryParser { +public class SpanMultiTermQueryParser implements QueryParser { - public static final String NAME = "span_multi"; public static final String MATCH_NAME = "match"; - @Inject - public SpanMultiTermQueryParser() { - } - @Override public String[] names() { - return new String[]{NAME, Strings.toCamelCase(NAME)}; + return new String[]{SpanMultiTermQueryBuilder.NAME, Strings.toCamelCase(SpanMultiTermQueryBuilder.NAME)}; } @Override - public Query parse(QueryParseContext parseContext) throws IOException, ParsingException { + public SpanMultiTermQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException { XContentParser parser = parseContext.parser(); - - Token token = parser.nextToken(); - if (!MATCH_NAME.equals(parser.currentName()) || token != XContentParser.Token.FIELD_NAME) { 
- throw new ParsingException(parseContext, "spanMultiTerm must have [" + MATCH_NAME + "] multi term query clause"); + String currentFieldName = null; + MultiTermQueryBuilder subQuery = null; + String queryName = null; + float boost = AbstractQueryBuilder.DEFAULT_BOOST; + XContentParser.Token token; + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + currentFieldName = parser.currentName(); + } else if (token == XContentParser.Token.START_OBJECT) { + if (MATCH_NAME.equals(currentFieldName)) { + QueryBuilder innerQuery = parseContext.parseInnerQueryBuilder(); + if (innerQuery instanceof MultiTermQueryBuilder == false) { + throw new ParsingException(parser.getTokenLocation(), "[span_multi] [" + MATCH_NAME + "] must be of type multi term query"); + } + subQuery = (MultiTermQueryBuilder) innerQuery; + } else { + throw new ParsingException(parser.getTokenLocation(), "[span_multi] query does not support [" + currentFieldName + "]"); + } + } else if (token.isValue()) { + if ("_name".equals(currentFieldName)) { + queryName = parser.text(); + } else if ("boost".equals(currentFieldName)) { + boost = parser.floatValue(); + } else { + throw new ParsingException(parser.getTokenLocation(), "[span_multi] query does not support [" + currentFieldName + "]"); + } + } } - token = parser.nextToken(); - if (token != XContentParser.Token.START_OBJECT) { - throw new ParsingException(parseContext, "spanMultiTerm must have [" + MATCH_NAME + "] multi term query clause"); + if (subQuery == null) { + throw new ParsingException(parser.getTokenLocation(), "[span_multi] must have [" + MATCH_NAME + "] multi term query clause"); } - Query subQuery = parseContext.parseInnerQuery(); - if (!(subQuery instanceof MultiTermQuery)) { - throw new ParsingException(parseContext, "spanMultiTerm [" + MATCH_NAME + "] must be of type multi term query"); - } + return new 
SpanMultiTermQueryBuilder(subQuery).queryName(queryName).boost(boost); + } - parser.nextToken(); - return new SpanMultiTermQueryWrapper<>((MultiTermQuery) subQuery); + @Override + public SpanMultiTermQueryBuilder getBuilderPrototype() { + return SpanMultiTermQueryBuilder.PROTOTYPE; } } diff --git a/core/src/main/java/org/elasticsearch/index/query/SpanNearQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/SpanNearQueryBuilder.java index cb05e084c4f..a0435245653 100644 --- a/core/src/main/java/org/elasticsearch/index/query/SpanNearQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/SpanNearQueryBuilder.java @@ -19,86 +19,171 @@ package org.elasticsearch.index.query; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.spans.SpanNearQuery; +import org.apache.lucene.search.spans.SpanQuery; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import java.io.IOException; import java.util.ArrayList; +import java.util.List; +import java.util.Objects; -public class SpanNearQueryBuilder extends SpanQueryBuilder implements BoostableQueryBuilder { +/** + * Matches spans which are near one another. One can specify slop, the maximum number + * of intervening unmatched positions, as well as whether matches are required to be in-order. + * The span near query maps to Lucene {@link SpanNearQuery}. 
+ */ +public class SpanNearQueryBuilder extends AbstractQueryBuilder implements SpanQueryBuilder { - private ArrayList clauses = new ArrayList<>(); + public static final String NAME = "span_near"; - private Integer slop = null; + /** Default for flag controlling whether matches are required to be in-order */ + public static boolean DEFAULT_IN_ORDER = true; - private Boolean inOrder; + /** Default for flag controlling whether payloads are collected */ + public static boolean DEFAULT_COLLECT_PAYLOADS = true; - private Boolean collectPayloads; + private final List clauses = new ArrayList<>(); - private float boost = -1; + private final int slop; - private String queryName; + private boolean inOrder = DEFAULT_IN_ORDER; + + private boolean collectPayloads = DEFAULT_COLLECT_PAYLOADS; + + static final SpanNearQueryBuilder PROTOTYPE = new SpanNearQueryBuilder(SpanTermQueryBuilder.PROTOTYPE, 0); + + /** + * @param initialClause an initial span query clause + * @param slop controls the maximum number of intervening unmatched positions permitted + */ + public SpanNearQueryBuilder(SpanQueryBuilder initialClause, int slop) { + if (initialClause == null) { + throw new IllegalArgumentException("query must include at least one clause"); + } + this.clauses.add(initialClause); + this.slop = slop; + } + + /** + * @return the maximum number of intervening unmatched positions permitted + */ + public int slop() { + return this.slop; + } public SpanNearQueryBuilder clause(SpanQueryBuilder clause) { + if (clause == null) { + throw new IllegalArgumentException("query clauses cannot be null"); + } clauses.add(clause); return this; } - public SpanNearQueryBuilder slop(int slop) { - this.slop = slop; - return this; + /** + * @return the {@link SpanQueryBuilder} clauses that were set for this query + */ + public List clauses() { + return this.clauses; } + /** + * When inOrder is true, the spans from each clause + * must be in the same order as in clauses and must be non-overlapping. 
+ * Defaults to true + */ public SpanNearQueryBuilder inOrder(boolean inOrder) { this.inOrder = inOrder; return this; } + /** + * @see SpanNearQueryBuilder#inOrder(boolean) + */ + public boolean inOrder() { + return this.inOrder; + } + + /** + * @param collectPayloads flag controlling whether payloads are collected + */ public SpanNearQueryBuilder collectPayloads(boolean collectPayloads) { this.collectPayloads = collectPayloads; return this; } - @Override - public SpanNearQueryBuilder boost(float boost) { - this.boost = boost; - return this; - } - /** - * Sets the query name for the filter that can be used when searching for matched_filters per hit. + * @see SpanNearQueryBuilder#collectPayloads(boolean) */ - public SpanNearQueryBuilder queryName(String queryName) { - this.queryName = queryName; - return this; + public boolean collectPayloads() { + return this.collectPayloads; } @Override protected void doXContent(XContentBuilder builder, Params params) throws IOException { - if (clauses.isEmpty()) { - throw new IllegalArgumentException("Must have at least one clause when building a spanNear query"); - } - if (slop == null) { - throw new IllegalArgumentException("Must set the slop when building a spanNear query"); - } - builder.startObject(SpanNearQueryParser.NAME); + builder.startObject(NAME); builder.startArray("clauses"); for (SpanQueryBuilder clause : clauses) { clause.toXContent(builder, params); } builder.endArray(); - builder.field("slop", slop.intValue()); - if (inOrder != null) { - builder.field("in_order", inOrder); - } - if (collectPayloads != null) { - builder.field("collect_payloads", collectPayloads); - } - if (boost != -1) { - builder.field("boost", boost); - } - if (queryName != null) { - builder.field("_name", queryName); - } + builder.field("slop", slop); + builder.field("in_order", inOrder); + builder.field("collect_payloads", collectPayloads); + printBoostAndQueryName(builder); builder.endObject(); } + + @Override + protected Query 
doToQuery(QueryShardContext context) throws IOException { + SpanQuery[] spanQueries = new SpanQuery[clauses.size()]; + for (int i = 0; i < clauses.size(); i++) { + Query query = clauses.get(i).toQuery(context); + assert query instanceof SpanQuery; + spanQueries[i] = (SpanQuery) query; + } + return new SpanNearQuery(spanQueries, slop, inOrder, collectPayloads); + } + + @Override + protected SpanNearQueryBuilder doReadFrom(StreamInput in) throws IOException { + List clauses = readQueries(in); + SpanNearQueryBuilder queryBuilder = new SpanNearQueryBuilder((SpanQueryBuilder)clauses.get(0), in.readVInt()); + for (int i = 1; i < clauses.size(); i++) { + queryBuilder.clauses.add((SpanQueryBuilder)clauses.get(i)); + } + queryBuilder.collectPayloads = in.readBoolean(); + queryBuilder.inOrder = in.readBoolean(); + return queryBuilder; + + } + + @Override + protected void doWriteTo(StreamOutput out) throws IOException { + writeQueries(out, clauses); + out.writeVInt(slop); + out.writeBoolean(collectPayloads); + out.writeBoolean(inOrder); + } + + @Override + protected int doHashCode() { + return Objects.hash(clauses, slop, collectPayloads, inOrder); + } + + @Override + protected boolean doEquals(SpanNearQueryBuilder other) { + return Objects.equals(clauses, other.clauses) && + Objects.equals(slop, other.slop) && + Objects.equals(collectPayloads, other.collectPayloads) && + Objects.equals(inOrder, other.inOrder); + } + + @Override + public String getWriteableName() { + return NAME; + } } diff --git a/core/src/main/java/org/elasticsearch/index/query/SpanNearQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/SpanNearQueryParser.java index 86c1e47645e..4600f698231 100644 --- a/core/src/main/java/org/elasticsearch/index/query/SpanNearQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/SpanNearQueryParser.java @@ -19,12 +19,8 @@ package org.elasticsearch.index.query; -import org.apache.lucene.search.Query; -import 
org.apache.lucene.search.spans.SpanNearQuery; -import org.apache.lucene.search.spans.SpanQuery; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; @@ -32,32 +28,26 @@ import java.util.ArrayList; import java.util.List; /** - * + * Parser for span_near query */ -public class SpanNearQueryParser implements QueryParser { - - public static final String NAME = "span_near"; - - @Inject - public SpanNearQueryParser() { - } +public class SpanNearQueryParser implements QueryParser { @Override public String[] names() { - return new String[]{NAME, Strings.toCamelCase(NAME)}; + return new String[]{SpanNearQueryBuilder.NAME, Strings.toCamelCase(SpanNearQueryBuilder.NAME)}; } @Override - public Query parse(QueryParseContext parseContext) throws IOException, ParsingException { + public SpanNearQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException { XContentParser parser = parseContext.parser(); - float boost = 1.0f; + float boost = AbstractQueryBuilder.DEFAULT_BOOST; Integer slop = null; - boolean inOrder = true; - boolean collectPayloads = true; + boolean inOrder = SpanNearQueryBuilder.DEFAULT_IN_ORDER; + boolean collectPayloads = SpanNearQueryBuilder.DEFAULT_COLLECT_PAYLOADS; String queryName = null; - List clauses = new ArrayList<>(); + List clauses = new ArrayList<>(); String currentFieldName = null; XContentParser.Token token; @@ -67,14 +57,14 @@ public class SpanNearQueryParser implements QueryParser { } else if (token == XContentParser.Token.START_ARRAY) { if ("clauses".equals(currentFieldName)) { while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { - Query query = parseContext.parseInnerQuery(); - if (!(query instanceof SpanQuery)) { - throw new ParsingException(parseContext, "spanNear [clauses] must be of type span query"); + QueryBuilder query = 
parseContext.parseInnerQueryBuilder(); + if (!(query instanceof SpanQueryBuilder)) { + throw new ParsingException(parser.getTokenLocation(), "spanNear [clauses] must be of type span query"); } - clauses.add((SpanQuery) query); + clauses.add((SpanQueryBuilder) query); } } else { - throw new ParsingException(parseContext, "[span_near] query does not support [" + currentFieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), "[span_near] query does not support [" + currentFieldName + "]"); } } else if (token.isValue()) { if ("in_order".equals(currentFieldName) || "inOrder".equals(currentFieldName)) { @@ -82,30 +72,40 @@ public class SpanNearQueryParser implements QueryParser { } else if ("collect_payloads".equals(currentFieldName) || "collectPayloads".equals(currentFieldName)) { collectPayloads = parser.booleanValue(); } else if ("slop".equals(currentFieldName)) { - slop = Integer.valueOf(parser.intValue()); + slop = parser.intValue(); } else if ("boost".equals(currentFieldName)) { boost = parser.floatValue(); } else if ("_name".equals(currentFieldName)) { queryName = parser.text(); } else { - throw new ParsingException(parseContext, "[span_near] query does not support [" + currentFieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), "[span_near] query does not support [" + currentFieldName + "]"); } } else { - throw new ParsingException(parseContext, "[span_near] query does not support [" + currentFieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), "[span_near] query does not support [" + currentFieldName + "]"); } } + if (clauses.isEmpty()) { - throw new ParsingException(parseContext, "span_near must include [clauses]"); - } - if (slop == null) { - throw new ParsingException(parseContext, "span_near must include [slop]"); + throw new ParsingException(parser.getTokenLocation(), "span_near must include [clauses]"); } - SpanNearQuery query = new SpanNearQuery(clauses.toArray(new SpanQuery[clauses.size()]), 
slop.intValue(), inOrder, collectPayloads); - query.setBoost(boost); - if (queryName != null) { - parseContext.addNamedQuery(queryName, query); + if (slop == null) { + throw new ParsingException(parser.getTokenLocation(), "span_near must include [slop]"); } - return query; + + SpanNearQueryBuilder queryBuilder = new SpanNearQueryBuilder(clauses.get(0), slop); + for (int i = 1; i < clauses.size(); i++) { + queryBuilder.clause(clauses.get(i)); + } + queryBuilder.inOrder(inOrder); + queryBuilder.collectPayloads(collectPayloads); + queryBuilder.boost(boost); + queryBuilder.queryName(queryName); + return queryBuilder; + } + + @Override + public SpanNearQueryBuilder getBuilderPrototype() { + return SpanNearQueryBuilder.PROTOTYPE; } } diff --git a/core/src/main/java/org/elasticsearch/index/query/SpanNotQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/SpanNotQueryBuilder.java index e37cd80a5a7..ffe3cecf412 100644 --- a/core/src/main/java/org/elasticsearch/index/query/SpanNotQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/SpanNotQueryBuilder.java @@ -19,100 +19,166 @@ package org.elasticsearch.index.query; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.spans.SpanNotQuery; +import org.apache.lucene.search.spans.SpanQuery; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import java.io.IOException; +import java.util.Objects; -public class SpanNotQueryBuilder extends SpanQueryBuilder implements BoostableQueryBuilder { +public class SpanNotQueryBuilder extends AbstractQueryBuilder implements SpanQueryBuilder { - private SpanQueryBuilder include; + public static final String NAME = "span_not"; - private SpanQueryBuilder exclude; + /** the default pre parameter size */ + public static final int DEFAULT_PRE = 0; + /** the default post parameter size */ + public static final int DEFAULT_POST 
= 0; - private Integer dist; + private final SpanQueryBuilder include; - private Integer pre; + private final SpanQueryBuilder exclude; - private Integer post; + private int pre = DEFAULT_PRE; - private Float boost; + private int post = DEFAULT_POST; - private String queryName; + static final SpanNotQueryBuilder PROTOTYPE = new SpanNotQueryBuilder(SpanTermQueryBuilder.PROTOTYPE, SpanTermQueryBuilder.PROTOTYPE); - public SpanNotQueryBuilder include(SpanQueryBuilder include) { + /** + * Construct a span query matching spans from include which + * have no overlap with spans from exclude. + * @param include the span query whose matches are filtered + * @param exclude the span query whose matches must not overlap + */ + public SpanNotQueryBuilder(SpanQueryBuilder include, SpanQueryBuilder exclude) { + if (include == null) { + throw new IllegalArgumentException("inner clause [include] cannot be null."); + } + if (exclude == null) { + throw new IllegalArgumentException("inner clause [exclude] cannot be null."); + } this.include = include; - return this; - } - - public SpanNotQueryBuilder exclude(SpanQueryBuilder exclude) { this.exclude = exclude; - return this; } + /** + * @return the span query whose matches are filtered + */ + public SpanQueryBuilder includeQuery() { + return this.include; + } + + /** + * @return the span query whose matches must not overlap + */ + public SpanQueryBuilder excludeQuery() { + return this.exclude; + } + + /** + * @param dist the amount of tokens from within the include span can’t have overlap with the exclude span. + * Equivalent to setting both pre and post parameter. + */ public SpanNotQueryBuilder dist(int dist) { - this.dist = dist; + pre(dist); + post(dist); return this; } + /** + * @param pre the amount of tokens before the include span that can’t have overlap with the exclude span. Values + * smaller than 0 will be ignored and 0 used instead. + */ public SpanNotQueryBuilder pre(int pre) { - this.pre = (pre >=0) ? 
pre : 0; + this.pre = (pre >= 0) ? pre : 0; return this; } + /** + * @return the amount of tokens before the include span that can’t have overlap with the exclude span. + * @see SpanNotQueryBuilder#pre(int) + */ + public Integer pre() { + return this.pre; + } + + /** + * @param post the amount of tokens after the include span that can’t have overlap with the exclude span. + */ public SpanNotQueryBuilder post(int post) { this.post = (post >= 0) ? post : 0; return this; } - @Override - public SpanNotQueryBuilder boost(float boost) { - this.boost = boost; - return this; - } - /** - * Sets the query name for the filter that can be used when searching for matched_filters per hit. - * @param queryName The query name - * @return this + * @return the amount of tokens after the include span that can’t have overlap with the exclude span. + * @see SpanNotQueryBuilder#post(int) */ - public SpanNotQueryBuilder queryName(String queryName) { - this.queryName = queryName; - return this; + public Integer post() { + return this.post; } @Override protected void doXContent(XContentBuilder builder, Params params) throws IOException { - if (include == null) { - throw new IllegalArgumentException("Must specify include when using spanNot query"); - } - if (exclude == null) { - throw new IllegalArgumentException("Must specify exclude when using spanNot query"); - } - - if (dist != null && (pre != null || post != null)) { - throw new IllegalArgumentException("spanNot can either use [dist] or [pre] & [post] (or none)"); - } - - builder.startObject(SpanNotQueryParser.NAME); + builder.startObject(NAME); builder.field("include"); include.toXContent(builder, params); builder.field("exclude"); exclude.toXContent(builder, params); - if (dist != null) { - builder.field("dist", dist); - } - if (pre != null) { - builder.field("pre", pre); - } - if (post != null) { - builder.field("post", post); - } - if (boost != null) { - builder.field("boost", boost); - } - if (queryName != null) { - 
builder.field("_name", queryName); - } + builder.field("pre", pre); + builder.field("post", post); + printBoostAndQueryName(builder); builder.endObject(); } + + @Override + protected Query doToQuery(QueryShardContext context) throws IOException { + + Query includeQuery = this.include.toQuery(context); + assert includeQuery instanceof SpanQuery; + Query excludeQuery = this.exclude.toQuery(context); + assert excludeQuery instanceof SpanQuery; + + return new SpanNotQuery((SpanQuery) includeQuery, (SpanQuery) excludeQuery, pre, post); + } + + @Override + protected SpanNotQueryBuilder doReadFrom(StreamInput in) throws IOException { + SpanQueryBuilder include = (SpanQueryBuilder)in.readQuery(); + SpanQueryBuilder exclude = (SpanQueryBuilder)in.readQuery(); + SpanNotQueryBuilder queryBuilder = new SpanNotQueryBuilder(include, exclude); + queryBuilder.pre(in.readVInt()); + queryBuilder.post(in.readVInt()); + return queryBuilder; + } + + @Override + protected void doWriteTo(StreamOutput out) throws IOException { + out.writeQuery(include); + out.writeQuery(exclude); + out.writeVInt(pre); + out.writeVInt(post); + } + + @Override + protected int doHashCode() { + return Objects.hash(include, exclude, pre, post); + } + + @Override + protected boolean doEquals(SpanNotQueryBuilder other) { + return Objects.equals(include, other.include) && + Objects.equals(exclude, other.exclude) && + (pre == other.pre) && + (post == other.post); + } + + @Override + public String getWriteableName() { + return NAME; + } } diff --git a/core/src/main/java/org/elasticsearch/index/query/SpanNotQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/SpanNotQueryParser.java index 4b135e17c93..4b4876c8cff 100644 --- a/core/src/main/java/org/elasticsearch/index/query/SpanNotQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/SpanNotQueryParser.java @@ -19,40 +19,30 @@ package org.elasticsearch.index.query; -import org.apache.lucene.search.Query; -import 
org.apache.lucene.search.spans.SpanNotQuery; -import org.apache.lucene.search.spans.SpanQuery; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; /** - * + * Parser for span_not query */ -public class SpanNotQueryParser implements QueryParser { - - public static final String NAME = "span_not"; - - @Inject - public SpanNotQueryParser() { - } +public class SpanNotQueryParser implements QueryParser { @Override public String[] names() { - return new String[]{NAME, Strings.toCamelCase(NAME)}; + return new String[]{SpanNotQueryBuilder.NAME, Strings.toCamelCase(SpanNotQueryBuilder.NAME)}; } @Override - public Query parse(QueryParseContext parseContext) throws IOException, ParsingException { + public SpanNotQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException { XContentParser parser = parseContext.parser(); - float boost = 1.0f; + float boost = AbstractQueryBuilder.DEFAULT_BOOST; - SpanQuery include = null; - SpanQuery exclude = null; + SpanQueryBuilder include = null; + SpanQueryBuilder exclude = null; Integer dist = null; Integer pre = null; @@ -67,19 +57,19 @@ public class SpanNotQueryParser implements QueryParser { currentFieldName = parser.currentName(); } else if (token == XContentParser.Token.START_OBJECT) { if ("include".equals(currentFieldName)) { - Query query = parseContext.parseInnerQuery(); - if (!(query instanceof SpanQuery)) { - throw new ParsingException(parseContext, "spanNot [include] must be of type span query"); + QueryBuilder query = parseContext.parseInnerQueryBuilder(); + if (!(query instanceof SpanQueryBuilder)) { + throw new ParsingException(parser.getTokenLocation(), "spanNot [include] must be of type span query"); } - include = (SpanQuery) query; + include = (SpanQueryBuilder) query; } else if ("exclude".equals(currentFieldName)) { - Query query = 
parseContext.parseInnerQuery(); - if (!(query instanceof SpanQuery)) { - throw new ParsingException(parseContext, "spanNot [exclude] must be of type span query"); + QueryBuilder query = parseContext.parseInnerQueryBuilder(); + if (!(query instanceof SpanQueryBuilder)) { + throw new ParsingException(parser.getTokenLocation(), "spanNot [exclude] must be of type span query"); } - exclude = (SpanQuery) query; + exclude = (SpanQueryBuilder) query; } else { - throw new ParsingException(parseContext, "[span_not] query does not support [" + currentFieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), "[span_not] query does not support [" + currentFieldName + "]"); } } else { if ("dist".equals(currentFieldName)) { @@ -93,40 +83,37 @@ public class SpanNotQueryParser implements QueryParser { } else if ("_name".equals(currentFieldName)) { queryName = parser.text(); } else { - throw new ParsingException(parseContext, "[span_not] query does not support [" + currentFieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), "[span_not] query does not support [" + currentFieldName + "]"); } } } if (include == null) { - throw new ParsingException(parseContext, "spanNot must have [include] span query clause"); + throw new ParsingException(parser.getTokenLocation(), "spanNot must have [include] span query clause"); } if (exclude == null) { - throw new ParsingException(parseContext, "spanNot must have [exclude] span query clause"); + throw new ParsingException(parser.getTokenLocation(), "spanNot must have [exclude] span query clause"); } if (dist != null && (pre != null || post != null)) { - throw new ParsingException(parseContext, "spanNot can either use [dist] or [pre] & [post] (or none)"); + throw new ParsingException(parser.getTokenLocation(), "spanNot can either use [dist] or [pre] & [post] (or none)"); } - // set appropriate defaults - if (pre != null && post == null) { - post = 0; - } else if (pre == null && post != null){ - pre = 0; + 
SpanNotQueryBuilder spanNotQuery = new SpanNotQueryBuilder(include, exclude); + if (dist != null) { + spanNotQuery.dist(dist); } - - SpanNotQuery query; - if (pre != null && post != null) { - query = new SpanNotQuery(include, exclude, pre, post); - } else if (dist != null) { - query = new SpanNotQuery(include, exclude, dist); - } else { - query = new SpanNotQuery(include, exclude); + if (pre != null) { + spanNotQuery.pre(pre); } - - query.setBoost(boost); - if (queryName != null) { - parseContext.addNamedQuery(queryName, query); + if (post != null) { + spanNotQuery.post(post); } - return query; + spanNotQuery.boost(boost); + spanNotQuery.queryName(queryName); + return spanNotQuery; } -} \ No newline at end of file + + @Override + public SpanNotQueryBuilder getBuilderPrototype() { + return SpanNotQueryBuilder.PROTOTYPE; + } +} diff --git a/core/src/main/java/org/elasticsearch/index/query/SpanOrQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/SpanOrQueryBuilder.java index 0042aa7c44c..a46bef4e520 100644 --- a/core/src/main/java/org/elasticsearch/index/query/SpanOrQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/SpanOrQueryBuilder.java @@ -19,55 +19,102 @@ package org.elasticsearch.index.query; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.spans.SpanOrQuery; +import org.apache.lucene.search.spans.SpanQuery; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import java.io.IOException; import java.util.ArrayList; +import java.util.List; +import java.util.Objects; -public class SpanOrQueryBuilder extends SpanQueryBuilder implements BoostableQueryBuilder { +/** + * Span query that matches the union of its clauses. Maps to {@link SpanOrQuery}. 
+ */ +public class SpanOrQueryBuilder extends AbstractQueryBuilder implements SpanQueryBuilder { - private ArrayList clauses = new ArrayList<>(); + public static final String NAME = "span_or"; - private float boost = -1; + private final List clauses = new ArrayList<>(); - private String queryName; + static final SpanOrQueryBuilder PROTOTYPE = new SpanOrQueryBuilder(SpanTermQueryBuilder.PROTOTYPE); + + public SpanOrQueryBuilder(SpanQueryBuilder initialClause) { + if (initialClause == null) { + throw new IllegalArgumentException("query must include at least one clause"); + } + clauses.add(initialClause); + } public SpanOrQueryBuilder clause(SpanQueryBuilder clause) { + if (clause == null) { + throw new IllegalArgumentException("inner bool query clause cannot be null"); + } clauses.add(clause); return this; } - @Override - public SpanOrQueryBuilder boost(float boost) { - this.boost = boost; - return this; - } - /** - * Sets the query name for the filter that can be used when searching for matched_filters per hit. 
+ * @return the {@link SpanQueryBuilder} clauses that were set for this query */ - public SpanOrQueryBuilder queryName(String queryName) { - this.queryName = queryName; - return this; + public List clauses() { + return this.clauses; } @Override protected void doXContent(XContentBuilder builder, Params params) throws IOException { - if (clauses.isEmpty()) { - throw new IllegalArgumentException("Must have at least one clause when building a spanOr query"); - } - builder.startObject(SpanOrQueryParser.NAME); + builder.startObject(NAME); builder.startArray("clauses"); for (SpanQueryBuilder clause : clauses) { clause.toXContent(builder, params); } builder.endArray(); - if (boost != -1) { - builder.field("boost", boost); - } - if (queryName != null) { - builder.field("_name", queryName); - } + printBoostAndQueryName(builder); builder.endObject(); } + + @Override + protected Query doToQuery(QueryShardContext context) throws IOException { + SpanQuery[] spanQueries = new SpanQuery[clauses.size()]; + for (int i = 0; i < clauses.size(); i++) { + Query query = clauses.get(i).toQuery(context); + assert query instanceof SpanQuery; + spanQueries[i] = (SpanQuery) query; + } + return new SpanOrQuery(spanQueries); + } + + @Override + protected SpanOrQueryBuilder doReadFrom(StreamInput in) throws IOException { + List clauses = readQueries(in); + SpanOrQueryBuilder queryBuilder = new SpanOrQueryBuilder((SpanQueryBuilder)clauses.get(0)); + for (int i = 1; i < clauses.size(); i++) { + queryBuilder.clauses.add((SpanQueryBuilder)clauses.get(i)); + } + return queryBuilder; + + } + + @Override + protected void doWriteTo(StreamOutput out) throws IOException { + writeQueries(out, clauses); + } + + @Override + protected int doHashCode() { + return Objects.hash(clauses); + } + + @Override + protected boolean doEquals(SpanOrQueryBuilder other) { + return Objects.equals(clauses, other.clauses); + } + + @Override + public String getWriteableName() { + return NAME; + } } diff --git 
a/core/src/main/java/org/elasticsearch/index/query/SpanOrQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/SpanOrQueryParser.java index 91f6ad17257..a0dabbdad06 100644 --- a/core/src/main/java/org/elasticsearch/index/query/SpanOrQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/SpanOrQueryParser.java @@ -19,12 +19,8 @@ package org.elasticsearch.index.query; -import org.apache.lucene.search.Query; -import org.apache.lucene.search.spans.SpanOrQuery; -import org.apache.lucene.search.spans.SpanQuery; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; @@ -32,29 +28,23 @@ import java.util.ArrayList; import java.util.List; /** - * + * Parser for span_or query */ -public class SpanOrQueryParser implements QueryParser { - - public static final String NAME = "span_or"; - - @Inject - public SpanOrQueryParser() { - } +public class SpanOrQueryParser implements QueryParser { @Override public String[] names() { - return new String[]{NAME, Strings.toCamelCase(NAME)}; + return new String[]{SpanOrQueryBuilder.NAME, Strings.toCamelCase(SpanOrQueryBuilder.NAME)}; } @Override - public Query parse(QueryParseContext parseContext) throws IOException, ParsingException { + public SpanOrQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException { XContentParser parser = parseContext.parser(); - float boost = 1.0f; + float boost = AbstractQueryBuilder.DEFAULT_BOOST; String queryName = null; - List clauses = new ArrayList<>(); + List clauses = new ArrayList<>(); String currentFieldName = null; XContentParser.Token token; @@ -64,14 +54,14 @@ public class SpanOrQueryParser implements QueryParser { } else if (token == XContentParser.Token.START_ARRAY) { if ("clauses".equals(currentFieldName)) { while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { - 
Query query = parseContext.parseInnerQuery(); - if (!(query instanceof SpanQuery)) { - throw new ParsingException(parseContext, "spanOr [clauses] must be of type span query"); + QueryBuilder query = parseContext.parseInnerQueryBuilder(); + if (!(query instanceof SpanQueryBuilder)) { + throw new ParsingException(parser.getTokenLocation(), "spanOr [clauses] must be of type span query"); } - clauses.add((SpanQuery) query); + clauses.add((SpanQueryBuilder) query); } } else { - throw new ParsingException(parseContext, "[span_or] query does not support [" + currentFieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), "[span_or] query does not support [" + currentFieldName + "]"); } } else { if ("boost".equals(currentFieldName)) { @@ -79,19 +69,26 @@ public class SpanOrQueryParser implements QueryParser { } else if ("_name".equals(currentFieldName)) { queryName = parser.text(); } else { - throw new ParsingException(parseContext, "[span_or] query does not support [" + currentFieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), "[span_or] query does not support [" + currentFieldName + "]"); } } } + if (clauses.isEmpty()) { - throw new ParsingException(parseContext, "spanOr must include [clauses]"); + throw new ParsingException(parser.getTokenLocation(), "spanOr must include [clauses]"); } - SpanOrQuery query = new SpanOrQuery(clauses.toArray(new SpanQuery[clauses.size()])); - query.setBoost(boost); - if (queryName != null) { - parseContext.addNamedQuery(queryName, query); + SpanOrQueryBuilder queryBuilder = new SpanOrQueryBuilder(clauses.get(0)); + for (int i = 1; i < clauses.size(); i++) { + queryBuilder.clause(clauses.get(i)); } - return query; + queryBuilder.boost(boost); + queryBuilder.queryName(queryName); + return queryBuilder; } -} \ No newline at end of file + + @Override + public SpanOrQueryBuilder getBuilderPrototype() { + return SpanOrQueryBuilder.PROTOTYPE; + } +} diff --git 
a/core/src/main/java/org/elasticsearch/index/query/SpanQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/SpanQueryBuilder.java index 4216f2257a3..d35dcbc536a 100644 --- a/core/src/main/java/org/elasticsearch/index/query/SpanQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/SpanQueryBuilder.java @@ -19,6 +19,9 @@ package org.elasticsearch.index.query; -public abstract class SpanQueryBuilder extends QueryBuilder { +/** + * Marker interface for a specific type of {@link QueryBuilder} that allows to build span queries + */ +public interface SpanQueryBuilder extends QueryBuilder { } diff --git a/core/src/main/java/org/elasticsearch/index/query/SpanTermQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/SpanTermQueryBuilder.java index 9d0176e2974..fc41dc4ba0d 100644 --- a/core/src/main/java/org/elasticsearch/index/query/SpanTermQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/SpanTermQueryBuilder.java @@ -19,75 +19,76 @@ package org.elasticsearch.index.query; -import org.elasticsearch.common.xcontent.XContentBuilder; +import org.apache.lucene.index.Term; +import org.apache.lucene.search.spans.SpanQuery; +import org.apache.lucene.search.spans.SpanTermQuery; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.lucene.BytesRefs; +import org.elasticsearch.index.mapper.MappedFieldType; import java.io.IOException; -public class SpanTermQueryBuilder extends SpanQueryBuilder implements BoostableQueryBuilder { +/** + * A Span Query that matches documents containing a term. 
+ * @see SpanTermQuery + */ +public class SpanTermQueryBuilder extends BaseTermQueryBuilder implements SpanQueryBuilder { - private final String name; - - private final Object value; - - private float boost = -1; - - private String queryName; + public static final String NAME = "span_term"; + static final SpanTermQueryBuilder PROTOTYPE = new SpanTermQueryBuilder("name", "value"); + /** @see BaseTermQueryBuilder#BaseTermQueryBuilder(String, String) */ public SpanTermQueryBuilder(String name, String value) { - this(name, (Object) value); + super(name, (Object) value); } + /** @see BaseTermQueryBuilder#BaseTermQueryBuilder(String, int) */ public SpanTermQueryBuilder(String name, int value) { - this(name, (Object) value); + super(name, (Object) value); } + /** @see BaseTermQueryBuilder#BaseTermQueryBuilder(String, long) */ public SpanTermQueryBuilder(String name, long value) { - this(name, (Object) value); + super(name, (Object) value); } + /** @see BaseTermQueryBuilder#BaseTermQueryBuilder(String, float) */ public SpanTermQueryBuilder(String name, float value) { - this(name, (Object) value); + super(name, (Object) value); } + /** @see BaseTermQueryBuilder#BaseTermQueryBuilder(String, double) */ public SpanTermQueryBuilder(String name, double value) { - this(name, (Object) value); + super(name, (Object) value); } - private SpanTermQueryBuilder(String name, Object value) { - this.name = name; - this.value = value; + /** @see BaseTermQueryBuilder#BaseTermQueryBuilder(String, Object) */ + public SpanTermQueryBuilder(String name, Object value) { + super(name, value); } @Override - public SpanTermQueryBuilder boost(float boost) { - this.boost = boost; - return this; - } - - /** - * Sets the query name for the filter that can be used when searching for matched_filters per hit. 
- */ - public SpanTermQueryBuilder queryName(String queryName) { - this.queryName = queryName; - return this; - } - - @Override - public void doXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(SpanTermQueryParser.NAME); - if (boost == -1 && queryName != null) { - builder.field(name, value); - } else { - builder.startObject(name); - builder.field("value", value); - if (boost != -1) { - builder.field("boost", boost); - } - if (queryName != null) { - builder.field("_name", queryName); - } - builder.endObject(); + public SpanQuery doToQuery(QueryShardContext context) throws IOException { + BytesRef valueBytes = null; + String fieldName = this.fieldName; + MappedFieldType mapper = context.fieldMapper(fieldName); + if (mapper != null) { + fieldName = mapper.names().indexName(); + valueBytes = mapper.indexedValueForSearch(value); } - builder.endObject(); + if (valueBytes == null) { + valueBytes = BytesRefs.toBytesRef(this.value); + } + return new SpanTermQuery(new Term(fieldName, valueBytes)); } -} \ No newline at end of file + + @Override + protected SpanTermQueryBuilder createBuilder(String fieldName, Object value) { + return new SpanTermQueryBuilder(fieldName, value); + } + + @Override + public String getWriteableName() { + return NAME; + } +} diff --git a/core/src/main/java/org/elasticsearch/index/query/SpanTermQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/SpanTermQueryParser.java index d9e28e9028f..5caefac77b6 100644 --- a/core/src/main/java/org/elasticsearch/index/query/SpanTermQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/SpanTermQueryParser.java @@ -19,48 +19,37 @@ package org.elasticsearch.index.query; -import org.apache.lucene.index.Term; -import org.apache.lucene.search.Query; -import org.apache.lucene.search.spans.SpanTermQuery; -import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; -import 
org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.mapper.MappedFieldType; import java.io.IOException; /** - * + * Parser for span_term query */ -public class SpanTermQueryParser implements QueryParser { - - public static final String NAME = "span_term"; - - @Inject - public SpanTermQueryParser() { - } +public class SpanTermQueryParser implements QueryParser { @Override public String[] names() { - return new String[]{NAME, Strings.toCamelCase(NAME)}; + return new String[]{SpanTermQueryBuilder.NAME, Strings.toCamelCase(SpanTermQueryBuilder.NAME)}; } @Override - public Query parse(QueryParseContext parseContext) throws IOException, ParsingException { + public SpanTermQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException, ParsingException { XContentParser parser = parseContext.parser(); XContentParser.Token token = parser.currentToken(); if (token == XContentParser.Token.START_OBJECT) { token = parser.nextToken(); } + assert token == XContentParser.Token.FIELD_NAME; String fieldName = parser.currentName(); - String value = null; - float boost = 1.0f; + Object value = null; + float boost = AbstractQueryBuilder.DEFAULT_BOOST; String queryName = null; token = parser.nextToken(); if (token == XContentParser.Token.START_OBJECT) { @@ -70,44 +59,36 @@ public class SpanTermQueryParser implements QueryParser { currentFieldName = parser.currentName(); } else { if ("term".equals(currentFieldName)) { - value = parser.text(); + value = parser.objectBytes(); } else if ("value".equals(currentFieldName)) { - value = parser.text(); + value = parser.objectBytes(); } else if ("boost".equals(currentFieldName)) { boost = parser.floatValue(); } else if ("_name".equals(currentFieldName)) { queryName = parser.text(); } else { - throw new ParsingException(parseContext, "[span_term] query does not support [" + currentFieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), 
"[span_term] query does not support [" + currentFieldName + "]"); } } } parser.nextToken(); } else { - value = parser.text(); + value = parser.objectBytes(); // move to the next token parser.nextToken(); } if (value == null) { - throw new ParsingException(parseContext, "No value specified for term query"); + throw new ParsingException(parser.getTokenLocation(), "No value specified for term query"); } - BytesRef valueBytes = null; - MappedFieldType fieldType = parseContext.fieldMapper(fieldName); - if (fieldType != null) { - fieldName = fieldType.names().indexName(); - valueBytes = fieldType.indexedValueForSearch(value); - } - if (valueBytes == null) { - valueBytes = new BytesRef(value); - } - - SpanTermQuery query = new SpanTermQuery(new Term(fieldName, valueBytes)); - query.setBoost(boost); - if (queryName != null) { - parseContext.addNamedQuery(queryName, query); - } - return query; + SpanTermQueryBuilder result = new SpanTermQueryBuilder(fieldName, value); + result.boost(boost).queryName(queryName); + return result; } -} \ No newline at end of file + + @Override + public SpanTermQueryBuilder getBuilderPrototype() { + return SpanTermQueryBuilder.PROTOTYPE; + } +} diff --git a/core/src/main/java/org/elasticsearch/index/query/SpanWithinQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/SpanWithinQueryBuilder.java index d2b2fdc408b..c3a11c8f325 100644 --- a/core/src/main/java/org/elasticsearch/index/query/SpanWithinQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/SpanWithinQueryBuilder.java @@ -19,59 +19,59 @@ package org.elasticsearch.index.query; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.spans.SpanQuery; +import org.apache.lucene.search.spans.SpanWithinQuery; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import java.io.IOException; +import java.util.Objects; /** * 
Builder for {@link org.apache.lucene.search.spans.SpanWithinQuery}. */ -public class SpanWithinQueryBuilder extends SpanQueryBuilder implements BoostableQueryBuilder { +public class SpanWithinQueryBuilder extends AbstractQueryBuilder implements SpanQueryBuilder { - private SpanQueryBuilder big; - private SpanQueryBuilder little; - private float boost = -1; - private String queryName; + public static final String NAME = "span_within"; + private final SpanQueryBuilder big; + private final SpanQueryBuilder little; + static final SpanWithinQueryBuilder PROTOTYPE = new SpanWithinQueryBuilder(SpanTermQueryBuilder.PROTOTYPE, SpanTermQueryBuilder.PROTOTYPE); - /** - * Sets the little clause, it must be contained within {@code big} for a match. + /** + * Query that returns spans from little that are contained in a spans from big. + * @param big clause that must enclose {@code little} for a match. + * @param little the little clause, it must be contained within {@code big} for a match. */ - public SpanWithinQueryBuilder little(SpanQueryBuilder clause) { - this.little = clause; - return this; - } - - /** - * Sets the big clause, it must enclose {@code little} for a match. - */ - public SpanWithinQueryBuilder big(SpanQueryBuilder clause) { - this.big = clause; - return this; - } - - @Override - public SpanWithinQueryBuilder boost(float boost) { - this.boost = boost; - return this; + public SpanWithinQueryBuilder(SpanQueryBuilder big, SpanQueryBuilder little) { + if (big == null) { + throw new IllegalArgumentException("inner clause [big] cannot be null."); + } + if (little == null) { + throw new IllegalArgumentException("inner clause [little] cannot be null."); + } + this.little = little; + this.big = big; } /** - * Sets the query name for the filter that can be used when searching for matched_filters per hit. + * @return the little clause, contained within {@code big} for a match. 
*/ - public SpanWithinQueryBuilder queryName(String queryName) { - this.queryName = queryName; - return this; + public SpanQueryBuilder littleQuery() { + return this.little; + } + + /** + * @return the big clause that must enclose {@code little} for a match. + */ + public SpanQueryBuilder bigQuery() { + return this.big; } @Override protected void doXContent(XContentBuilder builder, Params params) throws IOException { - if (big == null) { - throw new IllegalArgumentException("Must specify big clause when building a span_within query"); - } - if (little == null) { - throw new IllegalArgumentException("Must specify little clause when building a span_within query"); - } - builder.startObject(SpanWithinQueryParser.NAME); + builder.startObject(NAME); builder.field("big"); big.toXContent(builder, params); @@ -79,14 +79,46 @@ public class SpanWithinQueryBuilder extends SpanQueryBuilder implements Boostabl builder.field("little"); little.toXContent(builder, params); - if (boost != -1) { - builder.field("boost", boost); - } - - if (queryName != null) { - builder.field("_name", queryName); - } + printBoostAndQueryName(builder); builder.endObject(); } + + @Override + protected Query doToQuery(QueryShardContext context) throws IOException { + Query innerBig = big.toQuery(context); + assert innerBig instanceof SpanQuery; + Query innerLittle = little.toQuery(context); + assert innerLittle instanceof SpanQuery; + return new SpanWithinQuery((SpanQuery) innerBig, (SpanQuery) innerLittle); + } + + @Override + protected SpanWithinQueryBuilder doReadFrom(StreamInput in) throws IOException { + SpanQueryBuilder big = (SpanQueryBuilder)in.readQuery(); + SpanQueryBuilder little = (SpanQueryBuilder)in.readQuery(); + return new SpanWithinQueryBuilder(big, little); + } + + @Override + protected void doWriteTo(StreamOutput out) throws IOException { + out.writeQuery(big); + out.writeQuery(little); + } + + @Override + protected int doHashCode() { + return Objects.hash(big, little); + } + + 
@Override + protected boolean doEquals(SpanWithinQueryBuilder other) { + return Objects.equals(big, other.big) && + Objects.equals(little, other.little); + } + + @Override + public String getWriteableName() { + return NAME; + } } diff --git a/core/src/main/java/org/elasticsearch/index/query/SpanWithinQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/SpanWithinQueryParser.java index 8e960d7ae3a..1acb4eaecd2 100644 --- a/core/src/main/java/org/elasticsearch/index/query/SpanWithinQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/SpanWithinQueryParser.java @@ -19,40 +19,30 @@ package org.elasticsearch.index.query; -import org.apache.lucene.search.Query; -import org.apache.lucene.search.spans.SpanQuery; -import org.apache.lucene.search.spans.SpanWithinQuery; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; /** - * Parser for {@link SpanWithinQuery} + * Parser for span_within query */ -public class SpanWithinQueryParser implements QueryParser { - - public static final String NAME = "span_within"; - - @Inject - public SpanWithinQueryParser() { - } +public class SpanWithinQueryParser implements QueryParser { @Override public String[] names() { - return new String[]{NAME, Strings.toCamelCase(NAME)}; + return new String[]{SpanWithinQueryBuilder.NAME, Strings.toCamelCase(SpanWithinQueryBuilder.NAME)}; } @Override - public Query parse(QueryParseContext parseContext) throws IOException, ParsingException { + public SpanWithinQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException { XContentParser parser = parseContext.parser(); - float boost = 1.0f; + float boost = AbstractQueryBuilder.DEFAULT_BOOST; String queryName = null; - SpanQuery big = null; - SpanQuery little = null; + SpanQueryBuilder big = null; + SpanQueryBuilder little = null; String 
currentFieldName = null; XContentParser.Token token; @@ -61,43 +51,43 @@ public class SpanWithinQueryParser implements QueryParser { currentFieldName = parser.currentName(); } else if (token == XContentParser.Token.START_OBJECT) { if ("big".equals(currentFieldName)) { - Query query = parseContext.parseInnerQuery(); - if (query instanceof SpanQuery == false) { - throw new ParsingException(parseContext, "span_within [big] must be of type span query"); + QueryBuilder query = parseContext.parseInnerQueryBuilder(); + if (query instanceof SpanQueryBuilder == false) { + throw new ParsingException(parser.getTokenLocation(), "span_within [big] must be of type span query"); } - big = (SpanQuery) query; + big = (SpanQueryBuilder) query; } else if ("little".equals(currentFieldName)) { - Query query = parseContext.parseInnerQuery(); - if (query instanceof SpanQuery == false) { - throw new ParsingException(parseContext, "span_within [little] must be of type span query"); + QueryBuilder query = parseContext.parseInnerQueryBuilder(); + if (query instanceof SpanQueryBuilder == false) { + throw new ParsingException(parser.getTokenLocation(), "span_within [little] must be of type span query"); } - little = (SpanQuery) query; + little = (SpanQueryBuilder) query; } else { - throw new ParsingException(parseContext, "[span_within] query does not support [" + currentFieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), "[span_within] query does not support [" + currentFieldName + "]"); } } else if ("boost".equals(currentFieldName)) { boost = parser.floatValue(); } else if ("_name".equals(currentFieldName)) { queryName = parser.text(); } else { - throw new ParsingException(parseContext, "[span_within] query does not support [" + currentFieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), "[span_within] query does not support [" + currentFieldName + "]"); } - } - - if (big == null) { - throw new ParsingException(parseContext, "span_within must 
include [big]"); - } - if (little == null) { - throw new ParsingException(parseContext, "span_within must include [little]"); } - Query query = new SpanWithinQuery(big, little); - if (boost != 1.0F) { - query.setBoost(boost); + if (big == null) { + throw new ParsingException(parser.getTokenLocation(), "span_within must include [big]"); } - if (queryName != null) { - parseContext.addNamedQuery(queryName, query); + if (little == null) { + throw new ParsingException(parser.getTokenLocation(), "span_within must include [little]"); } + + SpanWithinQueryBuilder query = new SpanWithinQueryBuilder(big, little); + query.boost(boost).queryName(queryName); return query; } + + @Override + public SpanWithinQueryBuilder getBuilderPrototype() { + return SpanWithinQueryBuilder.PROTOTYPE; + } } diff --git a/core/src/main/java/org/elasticsearch/index/query/TemplateQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/TemplateQueryBuilder.java index 852977fa0db..d63f160542e 100644 --- a/core/src/main/java/org/elasticsearch/index/query/TemplateQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/TemplateQueryBuilder.java @@ -18,35 +18,49 @@ */ package org.elasticsearch.index.query; +import org.apache.lucene.search.Query; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.Template; +import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; import java.util.Map; +import java.util.Objects; /** * Facilitates creating template query requests. 
* */ -public class TemplateQueryBuilder extends QueryBuilder { +public class TemplateQueryBuilder extends AbstractQueryBuilder { + + /** Name to reference this type of query. */ + public static final String NAME = "template"; /** Template to fill. */ - private Template template; - /** Parameters to fill the template with. */ - private Map vars; - /** Template to fill.*/ - private String templateString; + private final Template template; - private ScriptService.ScriptType templateType; + static final TemplateQueryBuilder PROTOTYPE = new TemplateQueryBuilder(new Template("proto")); /** * @param template * the template to use for that query. * */ public TemplateQueryBuilder(Template template) { + if (template == null) { + throw new IllegalArgumentException("query template cannot be null"); + } this.template = template; } + public Template template() { + return template; + } + /** * @param template * the template to use for that query. @@ -56,7 +70,7 @@ public class TemplateQueryBuilder extends QueryBuilder { * */ @Deprecated public TemplateQueryBuilder(String template, Map vars) { - this(template, ScriptService.ScriptType.INLINE, vars); + this(new Template(template, ScriptService.ScriptType.INLINE, null, null, vars)); } /** @@ -70,18 +84,55 @@ public class TemplateQueryBuilder extends QueryBuilder { * */ @Deprecated public TemplateQueryBuilder(String template, ScriptService.ScriptType templateType, Map vars) { - this.templateString = template; - this.vars = vars; - this.templateType = templateType; + this(new Template(template, templateType, null, null, vars)); } @Override protected void doXContent(XContentBuilder builder, Params builderParams) throws IOException { - builder.field(TemplateQueryParser.NAME); - if (template == null) { - new Template(templateString, templateType, null, null, this.vars).toXContent(builder, builderParams); - } else { - template.toXContent(builder, builderParams); + builder.field(TemplateQueryBuilder.NAME); + template.toXContent(builder, 
builderParams); + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + protected Query doToQuery(QueryShardContext context) throws IOException { + BytesReference querySource = context.executeQueryTemplate(template, SearchContext.current()); + try (XContentParser qSourceParser = XContentFactory.xContent(querySource).createParser(querySource)) { + final QueryShardContext contextCopy = new QueryShardContext(context.index(), context.indexQueryParserService()); + contextCopy.reset(qSourceParser); + QueryBuilder result = contextCopy.parseContext().parseInnerQueryBuilder(); + context.combineNamedQueries(contextCopy); + return result.toQuery(context); } } + + @Override + protected void setFinalBoost(Query query) { + //no-op this query doesn't support boost + } + + @Override + protected TemplateQueryBuilder doReadFrom(StreamInput in) throws IOException { + TemplateQueryBuilder templateQueryBuilder = new TemplateQueryBuilder(Template.readTemplate(in)); + return templateQueryBuilder; + } + + @Override + protected void doWriteTo(StreamOutput out) throws IOException { + template.writeTo(out); + } + + @Override + protected int doHashCode() { + return Objects.hash(template); + } + + @Override + protected boolean doEquals(TemplateQueryBuilder other) { + return Objects.equals(template, other.template); + } } diff --git a/core/src/main/java/org/elasticsearch/index/query/TemplateQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/TemplateQueryParser.java index 1b5210d56dd..0df2460c847 100644 --- a/core/src/main/java/org/elasticsearch/index/query/TemplateQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/TemplateQueryParser.java @@ -18,18 +18,11 @@ */ package org.elasticsearch.index.query; -import org.apache.lucene.search.Query; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseFieldMatcher; -import org.elasticsearch.common.bytes.BytesReference; -import 
org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.script.ExecutableScript; -import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.Template; -import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; import java.util.HashMap; @@ -39,14 +32,7 @@ import java.util.Map; * In the simplest case, parse template string and variables from the request, * compile the template and execute the template against the given variables. * */ -public class TemplateQueryParser implements QueryParser { - - /** Name to reference this type of query. */ - public static final String NAME = "template"; - /** Name of query parameter containing the template string. */ - public static final String QUERY = "query"; - - private final ScriptService scriptService; +public class TemplateQueryParser implements QueryParser { private final static Map parametersToTypes = new HashMap<>(); static { @@ -55,14 +41,9 @@ public class TemplateQueryParser implements QueryParser { parametersToTypes.put("id", ScriptService.ScriptType.INDEXED); } - @Inject - public TemplateQueryParser(ScriptService scriptService) { - this.scriptService = scriptService; - } - @Override public String[] names() { - return new String[] { NAME }; + return new String[] {TemplateQueryBuilder.NAME}; } /** @@ -70,27 +51,17 @@ public class TemplateQueryParser implements QueryParser { * values. Handles both submitting the template as part of the request as * well as referencing only the template name. * - * @param parseContext - * parse context containing the templated query. + * @param parseContext parse context containing the templated query. 
*/ @Override @Nullable - public Query parse(QueryParseContext parseContext) throws IOException { + public TemplateQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException { XContentParser parser = parseContext.parser(); Template template = parse(parser, parseContext.parseFieldMatcher()); - ExecutableScript executable = this.scriptService.executable(template, ScriptContext.Standard.SEARCH, SearchContext.current()); - - BytesReference querySource = (BytesReference) executable.run(); - - try (XContentParser qSourceParser = XContentFactory.xContent(querySource).createParser(querySource)) { - final QueryParseContext context = new QueryParseContext(parseContext.index(), parseContext.indexQueryParserService()); - context.reset(qSourceParser); - return context.parseInnerQuery(); - } + return new TemplateQueryBuilder(template); } public static Template parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher, String... parameters) throws IOException { - Map parameterMap = new HashMap<>(parametersToTypes); for (String parameter : parameters) { parameterMap.put(parameter, ScriptService.ScriptType.INLINE); @@ -114,4 +85,9 @@ public class TemplateQueryParser implements QueryParser { public static Template parse(XContentParser parser, Map parameterMap, ParseFieldMatcher parseFieldMatcher) throws IOException { return Template.parse(parser, parameterMap, parseFieldMatcher); } + + @Override + public TemplateQueryBuilder getBuilderPrototype() { + return TemplateQueryBuilder.PROTOTYPE; + } } diff --git a/core/src/main/java/org/elasticsearch/index/query/TermQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/TermQueryBuilder.java index 5bd911a2e59..bed373b9f0c 100644 --- a/core/src/main/java/org/elasticsearch/index/query/TermQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/TermQueryBuilder.java @@ -19,128 +19,77 @@ package org.elasticsearch.index.query; -import org.elasticsearch.common.xcontent.XContentBuilder; 
+import org.apache.lucene.index.Term; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.TermQuery; +import org.elasticsearch.common.lucene.BytesRefs; +import org.elasticsearch.index.mapper.MappedFieldType; import java.io.IOException; /** * A Query that matches documents containing a term. */ -public class TermQueryBuilder extends QueryBuilder implements BoostableQueryBuilder { +public class TermQueryBuilder extends BaseTermQueryBuilder { - private final String name; + public static final String NAME = "term"; + static final TermQueryBuilder PROTOTYPE = new TermQueryBuilder("name", "value"); - private final Object value; - - private float boost = -1; - - private String queryName; - - /** - * Constructs a new term query. - * - * @param name The name of the field - * @param value The value of the term - */ - public TermQueryBuilder(String name, String value) { - this(name, (Object) value); + /** @see BaseTermQueryBuilder#BaseTermQueryBuilder(String, String) */ + public TermQueryBuilder(String fieldName, String value) { + super(fieldName, (Object) value); } - /** - * Constructs a new term query. - * - * @param name The name of the field - * @param value The value of the term - */ - public TermQueryBuilder(String name, int value) { - this(name, (Object) value); + /** @see BaseTermQueryBuilder#BaseTermQueryBuilder(String, int) */ + public TermQueryBuilder(String fieldName, int value) { + super(fieldName, (Object) value); } - /** - * Constructs a new term query. - * - * @param name The name of the field - * @param value The value of the term - */ - public TermQueryBuilder(String name, long value) { - this(name, (Object) value); + /** @see BaseTermQueryBuilder#BaseTermQueryBuilder(String, long) */ + public TermQueryBuilder(String fieldName, long value) { + super(fieldName, (Object) value); } - /** - * Constructs a new term query. 
- * - * @param name The name of the field - * @param value The value of the term - */ - public TermQueryBuilder(String name, float value) { - this(name, (Object) value); + /** @see BaseTermQueryBuilder#BaseTermQueryBuilder(String, float) */ + public TermQueryBuilder(String fieldName, float value) { + super(fieldName, (Object) value); } - /** - * Constructs a new term query. - * - * @param name The name of the field - * @param value The value of the term - */ - public TermQueryBuilder(String name, double value) { - this(name, (Object) value); + /** @see BaseTermQueryBuilder#BaseTermQueryBuilder(String, double) */ + public TermQueryBuilder(String fieldName, double value) { + super(fieldName, (Object) value); } - /** - * Constructs a new term query. - * - * @param name The name of the field - * @param value The value of the term - */ - public TermQueryBuilder(String name, boolean value) { - this(name, (Object) value); + /** @see BaseTermQueryBuilder#BaseTermQueryBuilder(String, boolean) */ + public TermQueryBuilder(String fieldName, boolean value) { + super(fieldName, (Object) value); } - /** - * Constructs a new term query. - * - * @param name The name of the field - * @param value The value of the term - */ - public TermQueryBuilder(String name, Object value) { - this.name = name; - this.value = value; - } - - /** - * Sets the boost for this query. Documents matching this query will (in addition to the normal - * weightings) have their score multiplied by the boost provided. - */ - @Override - public TermQueryBuilder boost(float boost) { - this.boost = boost; - return this; - } - - /** - * Sets the query name for the filter that can be used when searching for matched_filters per hit. 
- */ - public TermQueryBuilder queryName(String queryName) { - this.queryName = queryName; - return this; + /** @see BaseTermQueryBuilder#BaseTermQueryBuilder(String, Object) */ + public TermQueryBuilder(String fieldName, Object value) { + super(fieldName, value); } @Override - public void doXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(TermQueryParser.NAME); - if (boost == -1 && queryName == null) { - builder.field(name, value); - } else { - builder.startObject(name); - builder.field("value", value); - if (boost != -1) { - builder.field("boost", boost); - } - if (queryName != null) { - builder.field("_name", queryName); - } - builder.endObject(); + public Query doToQuery(QueryShardContext context) throws IOException { + Query query = null; + MappedFieldType mapper = context.fieldMapper(this.fieldName); + if (mapper != null) { + query = mapper.termQuery(this.value, context); } - builder.endObject(); + if (query == null) { + query = new TermQuery(new Term(this.fieldName, BytesRefs.toBytesRef(this.value))); + } + return query; + } + + @Override + protected TermQueryBuilder createBuilder(String fieldName, Object value) { + return new TermQueryBuilder(fieldName, value); + } + + @Override + public String getWriteableName() { + return NAME; } } diff --git a/core/src/main/java/org/elasticsearch/index/query/TermQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/TermQueryParser.java index 335a22f7575..0591497a3c8 100644 --- a/core/src/main/java/org/elasticsearch/index/query/TermQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/TermQueryParser.java @@ -19,45 +19,33 @@ package org.elasticsearch.index.query; -import org.apache.lucene.index.Term; -import org.apache.lucene.search.Query; -import org.apache.lucene.search.TermQuery; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; -import org.elasticsearch.common.inject.Inject; -import 
org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.mapper.MappedFieldType; import java.io.IOException; /** - * + * Parser for the term query */ -public class TermQueryParser implements QueryParser { - - public static final String NAME = "term"; +public class TermQueryParser implements QueryParser { private static final ParseField NAME_FIELD = new ParseField("_name").withAllDeprecated("query name is not supported in short version of term query"); private static final ParseField BOOST_FIELD = new ParseField("boost").withAllDeprecated("boost is not supported in short version of term query"); - @Inject - public TermQueryParser() { - } - @Override public String[] names() { - return new String[]{NAME}; + return new String[]{TermQueryBuilder.NAME}; } @Override - public Query parse(QueryParseContext parseContext) throws IOException, ParsingException { + public TermQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException { XContentParser parser = parseContext.parser(); String queryName = null; String fieldName = null; Object value = null; - float boost = 1.0f; + float boost = AbstractQueryBuilder.DEFAULT_BOOST; String currentFieldName = null; XContentParser.Token token; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { @@ -68,7 +56,7 @@ public class TermQueryParser implements QueryParser { } else if (token == XContentParser.Token.START_OBJECT) { // also support a format of "term" : {"field_name" : { ... 
}} if (fieldName != null) { - throw new ParsingException(parseContext, "[term] query does not support different field names, use [bool] query instead"); + throw new ParsingException(parser.getTokenLocation(), "[term] query does not support different field names, use [bool] query instead"); } fieldName = currentFieldName; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { @@ -84,7 +72,7 @@ public class TermQueryParser implements QueryParser { } else if ("boost".equals(currentFieldName)) { boost = parser.floatValue(); } else { - throw new ParsingException(parseContext, "[term] query does not support [" + currentFieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), "[term] query does not support [" + currentFieldName + "]"); } } } @@ -95,32 +83,26 @@ public class TermQueryParser implements QueryParser { boost = parser.floatValue(); } else { if (fieldName != null) { - throw new ParsingException(parseContext, "[term] query does not support different field names, use [bool] query instead"); + throw new ParsingException(parser.getTokenLocation(), "[term] query does not support different field names, use [bool] query instead"); } fieldName = currentFieldName; value = parser.objectBytes(); } } else if (token == XContentParser.Token.START_ARRAY) { - throw new ParsingException(parseContext, "[term] query does not support array of values"); + throw new ParsingException(parser.getTokenLocation(), "[term] query does not support array of values"); } } - if (value == null) { - throw new ParsingException(parseContext, "No value specified for term query"); - } - - Query query = null; - MappedFieldType fieldType = parseContext.fieldMapper(fieldName); - if (fieldType != null) { - query = fieldType.termQuery(value, parseContext); - } - if (query == null) { - query = new TermQuery(new Term(fieldName, BytesRefs.toBytesRef(value))); - } - query.setBoost(boost); + TermQueryBuilder termQuery = new TermQueryBuilder(fieldName, value); + 
termQuery.boost(boost); if (queryName != null) { - parseContext.addNamedQuery(queryName, query); + termQuery.queryName(queryName); } - return query; + return termQuery; + } + + @Override + public TermQueryBuilder getBuilderPrototype() { + return TermQueryBuilder.PROTOTYPE; } } diff --git a/core/src/main/java/org/elasticsearch/index/query/TermsLookupQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/TermsLookupQueryBuilder.java deleted file mode 100644 index 4bdd0daca3f..00000000000 --- a/core/src/main/java/org/elasticsearch/index/query/TermsLookupQueryBuilder.java +++ /dev/null @@ -1,111 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.index.query; - -import org.elasticsearch.common.xcontent.XContentBuilder; - -import java.io.IOException; - -/** - * A filer for a field based on several terms matching on any of them. 
- */ -public class TermsLookupQueryBuilder extends QueryBuilder { - - private final String name; - private String lookupIndex; - private String lookupType; - private String lookupId; - private String lookupRouting; - private String lookupPath; - - private String queryName; - - public TermsLookupQueryBuilder(String name) { - this.name = name; - } - - /** - * Sets the filter name for the filter that can be used when searching for matched_filters per hit. - */ - public TermsLookupQueryBuilder queryName(String queryName) { - this.queryName = queryName; - return this; - } - - /** - * Sets the index name to lookup the terms from. - */ - public TermsLookupQueryBuilder lookupIndex(String lookupIndex) { - this.lookupIndex = lookupIndex; - return this; - } - - /** - * Sets the index type to lookup the terms from. - */ - public TermsLookupQueryBuilder lookupType(String lookupType) { - this.lookupType = lookupType; - return this; - } - - /** - * Sets the doc id to lookup the terms from. - */ - public TermsLookupQueryBuilder lookupId(String lookupId) { - this.lookupId = lookupId; - return this; - } - - /** - * Sets the path within the document to lookup the terms from. 
- */ - public TermsLookupQueryBuilder lookupPath(String lookupPath) { - this.lookupPath = lookupPath; - return this; - } - - public TermsLookupQueryBuilder lookupRouting(String lookupRouting) { - this.lookupRouting = lookupRouting; - return this; - } - - @Override - public void doXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(TermsQueryParser.NAME); - - builder.startObject(name); - if (lookupIndex != null) { - builder.field("index", lookupIndex); - } - builder.field("type", lookupType); - builder.field("id", lookupId); - if (lookupRouting != null) { - builder.field("routing", lookupRouting); - } - builder.field("path", lookupPath); - builder.endObject(); - - if (queryName != null) { - builder.field("_name", queryName); - } - - builder.endObject(); - } -} \ No newline at end of file diff --git a/core/src/main/java/org/elasticsearch/index/query/TermsQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/TermsQueryBuilder.java index ca54eb3b3d3..c913d802bd4 100644 --- a/core/src/main/java/org/elasticsearch/index/query/TermsQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/TermsQueryBuilder.java @@ -19,101 +19,160 @@ package org.elasticsearch.index.query; +import org.apache.lucene.index.Term; +import org.apache.lucene.queries.TermsQuery; +import org.apache.lucene.search.BooleanClause; +import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.TermQuery; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.action.get.GetRequest; +import org.elasticsearch.action.get.GetResponse; +import org.elasticsearch.client.Client; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.lucene.BytesRefs; +import org.elasticsearch.common.lucene.search.Queries; import 
org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.indices.cache.query.terms.TermsLookup; +import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; +import java.util.*; +import java.util.stream.Collectors; +import java.util.stream.IntStream; /** - * A filer for a field based on several terms matching on any of them. + * A filter for a field based on several terms matching on any of them. */ -public class TermsQueryBuilder extends QueryBuilder implements BoostableQueryBuilder { +public class TermsQueryBuilder extends AbstractQueryBuilder { - private final String name; + public static final String NAME = "terms"; - private final Object values; + static final TermsQueryBuilder PROTOTYPE = new TermsQueryBuilder("field", "value"); + public static final boolean DEFAULT_DISABLE_COORD = false; + + private final String fieldName; + private final List values; + @Deprecated private String minimumShouldMatch; + @Deprecated + private boolean disableCoord = DEFAULT_DISABLE_COORD; + private final TermsLookup termsLookup; - private Boolean disableCoord; - - private String queryName; - - private float boost = -1; - - /** - * A filer for a field based on several terms matching on any of them. - * - * @param name The field name - * @param values The terms - */ - public TermsQueryBuilder(String name, String... values) { - this(name, (Object[]) values); + public TermsQueryBuilder(String fieldName, TermsLookup termsLookup) { + this(fieldName, null, null, DEFAULT_DISABLE_COORD, termsLookup); } /** - * A filer for a field based on several terms matching on any of them. - * - * @param name The field name - * @param values The terms + * constructor used internally for serialization of both value / termslookup variants */ - public TermsQueryBuilder(String name, int... 
values) { - this.name = name; + TermsQueryBuilder(String fieldName, List values, String minimumShouldMatch, boolean disableCoord, TermsLookup termsLookup) { + if (Strings.isEmpty(fieldName)) { + throw new IllegalArgumentException("field name cannot be null."); + } + if (values == null && termsLookup == null) { + throw new IllegalArgumentException("No value or termsLookup specified for terms query"); + } + if (values != null && termsLookup != null) { + throw new IllegalArgumentException("Both values and termsLookup specified for terms query"); + } + this.fieldName = fieldName; this.values = values; + this.disableCoord = disableCoord; + this.minimumShouldMatch = minimumShouldMatch; + this.termsLookup = termsLookup; } /** - * A filer for a field based on several terms matching on any of them. + * A filter for a field based on several terms matching on any of them. * - * @param name The field name + * @param fieldName The field name * @param values The terms */ - public TermsQueryBuilder(String name, long... values) { - this.name = name; - this.values = values; + public TermsQueryBuilder(String fieldName, String... values) { + this(fieldName, values != null ? Arrays.asList(values) : null); } /** - * A filer for a field based on several terms matching on any of them. + * A filter for a field based on several terms matching on any of them. * - * @param name The field name + * @param fieldName The field name * @param values The terms */ - public TermsQueryBuilder(String name, float... values) { - this.name = name; - this.values = values; + public TermsQueryBuilder(String fieldName, int... values) { + this(fieldName, values != null ? Arrays.stream(values).mapToObj(s -> s).collect(Collectors.toList()) : (Iterable) null); } /** - * A filer for a field based on several terms matching on any of them. + * A filter for a field based on several terms matching on any of them. 
* - * @param name The field name + * @param fieldName The field name * @param values The terms */ - public TermsQueryBuilder(String name, double... values) { - this.name = name; - this.values = values; + public TermsQueryBuilder(String fieldName, long... values) { + this(fieldName, values != null ? Arrays.stream(values).mapToObj(s -> s).collect(Collectors.toList()) : (Iterable) null); } /** - * A filer for a field based on several terms matching on any of them. + * A filter for a field based on several terms matching on any of them. * - * @param name The field name + * @param fieldName The field name * @param values The terms */ - public TermsQueryBuilder(String name, Object... values) { - this.name = name; - this.values = values; + public TermsQueryBuilder(String fieldName, float... values) { + this(fieldName, values != null ? IntStream.range(0, values.length) + .mapToObj(i -> values[i]).collect(Collectors.toList()) : (Iterable) null); } /** - * A filer for a field based on several terms matching on any of them. + * A filter for a field based on several terms matching on any of them. * - * @param name The field name + * @param fieldName The field name * @param values The terms */ - public TermsQueryBuilder(String name, Iterable values) { - this.name = name; - this.values = values; + public TermsQueryBuilder(String fieldName, double... values) { + this(fieldName, values != null ? Arrays.stream(values).mapToObj(s -> s).collect(Collectors.toList()) : (Iterable) null); + } + + /** + * A filter for a field based on several terms matching on any of them. + * + * @param fieldName The field name + * @param values The terms + */ + public TermsQueryBuilder(String fieldName, Object... values) { + this(fieldName, values != null ? Arrays.asList(values) : (Iterable) null); + } + + /** + * A filter for a field based on several terms matching on any of them. 
+ * + * @param fieldName The field name + * @param values The terms + */ + public TermsQueryBuilder(String fieldName, Iterable values) { + if (Strings.isEmpty(fieldName)) { + throw new IllegalArgumentException("field name cannot be null."); + } + if (values == null) { + throw new IllegalArgumentException("No value specified for terms query"); + } + this.fieldName = fieldName; + this.values = convertToBytesRefListIfStringList(values); + this.termsLookup = null; + } + + public String fieldName() { + return this.fieldName; + } + + public List values() { + return convertToStringListIfBytesRefList(this.values); } /** @@ -126,6 +185,10 @@ public class TermsQueryBuilder extends QueryBuilder implements BoostableQueryBui return this; } + public String minimumShouldMatch() { + return this.minimumShouldMatch; + } + /** * Disables Similarity#coord(int,int) in scoring. Defaults to false. * @deprecated use [bool] query instead @@ -136,41 +199,174 @@ public class TermsQueryBuilder extends QueryBuilder implements BoostableQueryBui return this; } - /** - * Sets the filter name for the filter that can be used when searching for matched_filters per hit. - */ - public TermsQueryBuilder queryName(String queryName) { - this.queryName = queryName; - return this; + boolean disableCoord() { + return this.disableCoord; } - @Override - public TermsQueryBuilder boost(float boost) { - this.boost = boost; - return this; + public TermsLookup termsLookup() { + return this.termsLookup; + } + + /** + * Same as {@link #convertToBytesRefIfString} but on Iterable. 
+ * @param objs the Iterable of input object + * @return the same input or a list of {@link BytesRef} representation if input was a list of type string + */ + private static List convertToBytesRefListIfStringList(Iterable objs) { + if (objs == null) { + return null; + } + List newObjs = new ArrayList<>(); + for (Object obj : objs) { + newObjs.add(convertToBytesRefIfString(obj)); + } + return newObjs; + } + + /** + * Same as {@link #convertToStringIfBytesRef} but on Iterable. + * @param objs the Iterable of input object + * @return the same input or a list of utf8 string if input was a list of type {@link BytesRef} + */ + private static List convertToStringListIfBytesRefList(Iterable objs) { + if (objs == null) { + return null; + } + List newObjs = new ArrayList<>(); + for (Object obj : objs) { + newObjs.add(convertToStringIfBytesRef(obj)); + } + return newObjs; } @Override public void doXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(TermsQueryParser.NAME); - builder.field(name, values); - + builder.startObject(NAME); + if (this.termsLookup != null) { + builder.startObject(fieldName); + termsLookup.toXContent(builder, params); + builder.endObject(); + } else { + builder.field(fieldName, convertToStringListIfBytesRefList(values)); + } if (minimumShouldMatch != null) { builder.field("minimum_should_match", minimumShouldMatch); } - - if (disableCoord != null) { + if (disableCoord != DEFAULT_DISABLE_COORD) { builder.field("disable_coord", disableCoord); } - - if (boost != -1) { - builder.field("boost", boost); - } - - if (queryName != null) { - builder.field("_name", queryName); - } - + printBoostAndQueryName(builder); builder.endObject(); } -} \ No newline at end of file + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + protected Query doToQuery(QueryShardContext context) throws IOException { + List terms; + if (this.termsLookup != null) { + if (termsLookup.index() == null) { + 
termsLookup.index(context.index().name()); + } + Client client = context.getClient(); + terms = fetch(termsLookup, client); + } else { + terms = values; + } + if (terms == null || terms.isEmpty()) { + return Queries.newMatchNoDocsQuery(); + } + return handleTermsQuery(terms, fieldName, context, minimumShouldMatch, disableCoord); + } + + private List fetch(TermsLookup termsLookup, Client client) { + List terms = new ArrayList<>(); + GetRequest getRequest = new GetRequest(termsLookup.index(), termsLookup.type(), termsLookup.id()) + .preference("_local").routing(termsLookup.routing()); + getRequest.copyContextAndHeadersFrom(SearchContext.current()); + final GetResponse getResponse = client.get(getRequest).actionGet(); + if (getResponse.isExists()) { + List extractedValues = XContentMapValues.extractRawValues(termsLookup.path(), getResponse.getSourceAsMap()); + terms.addAll(extractedValues); + } + return terms; + } + + private static Query handleTermsQuery(List terms, String fieldName, QueryShardContext context, String minimumShouldMatch, boolean disableCoord) { + MappedFieldType fieldType = context.fieldMapper(fieldName); + String indexFieldName; + if (fieldType != null) { + indexFieldName = fieldType.names().indexName(); + } else { + indexFieldName = fieldName; + } + + Query query; + if (context.isFilter()) { + if (fieldType != null) { + query = fieldType.termsQuery(terms, context); + } else { + BytesRef[] filterValues = new BytesRef[terms.size()]; + for (int i = 0; i < filterValues.length; i++) { + filterValues[i] = BytesRefs.toBytesRef(terms.get(i)); + } + query = new TermsQuery(indexFieldName, filterValues); + } + } else { + BooleanQuery.Builder bq = new BooleanQuery.Builder(); + bq.setDisableCoord(disableCoord); + for (Object term : terms) { + if (fieldType != null) { + bq.add(fieldType.termQuery(term, context), BooleanClause.Occur.SHOULD); + } else { + bq.add(new TermQuery(new Term(indexFieldName, BytesRefs.toBytesRef(term))), BooleanClause.Occur.SHOULD); + } + 
} + query = Queries.applyMinimumShouldMatch(bq.build(), minimumShouldMatch); + } + return query; + } + + @SuppressWarnings("unchecked") + @Override + protected TermsQueryBuilder doReadFrom(StreamInput in) throws IOException { + String field = in.readString(); + TermsLookup lookup = null; + if (in.readBoolean()) { + lookup = TermsLookup.readTermsLookupFrom(in); + } + List values = (List) in.readGenericValue(); + String minimumShouldMatch = in.readOptionalString(); + boolean disableCoord = in.readBoolean(); + return new TermsQueryBuilder(field, values, minimumShouldMatch, disableCoord, lookup); + } + + @Override + protected void doWriteTo(StreamOutput out) throws IOException { + out.writeString(fieldName); + out.writeBoolean(termsLookup != null); + if (termsLookup != null) { + termsLookup.writeTo(out); + } + out.writeGenericValue(values); + out.writeOptionalString(minimumShouldMatch); + out.writeBoolean(disableCoord); + } + + @Override + protected int doHashCode() { + return Objects.hash(fieldName, values, minimumShouldMatch, disableCoord, termsLookup); + } + + @Override + protected boolean doEquals(TermsQueryBuilder other) { + return Objects.equals(fieldName, other.fieldName) && + Objects.equals(values, other.values) && + Objects.equals(minimumShouldMatch, other.minimumShouldMatch) && + Objects.equals(disableCoord, other.disableCoord) && + Objects.equals(termsLookup, other.termsLookup); + } +} diff --git a/core/src/main/java/org/elasticsearch/index/query/TermsQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/TermsQueryParser.java index 898b0ff5e95..c76369195a3 100644 --- a/core/src/main/java/org/elasticsearch/index/query/TermsQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/TermsQueryParser.java @@ -19,76 +19,50 @@ package org.elasticsearch.index.query; -import org.apache.lucene.index.Term; -import org.apache.lucene.queries.TermsQuery; -import org.apache.lucene.search.BooleanClause.Occur; -import 
org.apache.lucene.search.BooleanQuery; -import org.apache.lucene.search.Query; -import org.apache.lucene.search.TermQuery; -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.action.get.GetRequest; -import org.elasticsearch.action.get.GetResponse; -import org.elasticsearch.client.Client; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.lucene.BytesRefs; -import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.xcontent.support.XContentMapValues; -import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.indices.cache.query.terms.TermsLookup; -import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; import java.util.ArrayList; import java.util.List; /** + * Parser for terms query and terms lookup. * + * Filters documents that have fields that match any of the provided terms (not analyzed) + * + * It also supports a terms lookup mechanism which can be used to fetch the term values from + * a document in an index. 
*/ public class TermsQueryParser implements QueryParser { - public static final String NAME = "terms"; - private static final ParseField MIN_SHOULD_MATCH_FIELD = new ParseField("min_match", "min_should_match").withAllDeprecated("Use [bool] query instead"); + private static final ParseField MIN_SHOULD_MATCH_FIELD = new ParseField("min_match", "min_should_match", "minimum_should_match") + .withAllDeprecated("Use [bool] query instead"); private static final ParseField DISABLE_COORD_FIELD = new ParseField("disable_coord").withAllDeprecated("Use [bool] query instead"); private static final ParseField EXECUTION_FIELD = new ParseField("execution").withAllDeprecated("execution is deprecated and has no effect"); - private Client client; - - @Inject - public TermsQueryParser() { - } @Override public String[] names() { - return new String[]{NAME, "in"}; - } - - @Inject(optional = true) - public void setClient(Client client) { - this.client = client; + return new String[]{TermsQueryBuilder.NAME, "in"}; } @Override - public Query parse(QueryParseContext parseContext) throws IOException, ParsingException { + public QueryBuilder fromXContent(QueryParseContext parseContext) throws IOException { XContentParser parser = parseContext.parser(); - String queryName = null; - String currentFieldName = null; - - String lookupIndex = parseContext.index().name(); - String lookupType = null; - String lookupId = null; - String lookupPath = null; - String lookupRouting = null; + String fieldName = null; + List values = null; String minShouldMatch = null; + boolean disableCoord = TermsQueryBuilder.DEFAULT_DISABLE_COORD; + TermsLookup termsLookup = null; - boolean disableCoord = false; + String queryName = null; + float boost = AbstractQueryBuilder.DEFAULT_BOOST; XContentParser.Token token; - List terms = new ArrayList<>(); - String fieldName = null; - float boost = 1f; + String currentFieldName = null; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == 
XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); @@ -96,54 +70,19 @@ public class TermsQueryParser implements QueryParser { // skip } else if (token == XContentParser.Token.START_ARRAY) { if (fieldName != null) { - throw new ParsingException(parseContext, "[terms] query does not support multiple fields"); + throw new ParsingException(parser.getTokenLocation(), "[terms] query does not support multiple fields"); } fieldName = currentFieldName; - - while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { - Object value = parser.objectBytes(); - if (value == null) { - throw new ParsingException(parseContext, "No value specified for terms query"); - } - terms.add(value); - } + values = parseValues(parser); } else if (token == XContentParser.Token.START_OBJECT) { fieldName = currentFieldName; - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = parser.currentName(); - } else if (token.isValue()) { - if ("index".equals(currentFieldName)) { - lookupIndex = parser.text(); - } else if ("type".equals(currentFieldName)) { - lookupType = parser.text(); - } else if ("id".equals(currentFieldName)) { - lookupId = parser.text(); - } else if ("path".equals(currentFieldName)) { - lookupPath = parser.text(); - } else if ("routing".equals(currentFieldName)) { - lookupRouting = parser.textOrNull(); - } else { - throw new ParsingException(parseContext, "[terms] query does not support [" + currentFieldName - + "] within lookup element"); - } - } - } - if (lookupType == null) { - throw new ParsingException(parseContext, "[terms] query lookup element requires specifying the type"); - } - if (lookupId == null) { - throw new ParsingException(parseContext, "[terms] query lookup element requires specifying the id"); - } - if (lookupPath == null) { - throw new ParsingException(parseContext, "[terms] query lookup element requires specifying the path"); - } + 
termsLookup = TermsLookup.parseTermsLookup(parser); } else if (token.isValue()) { if (parseContext.parseFieldMatcher().match(currentFieldName, EXECUTION_FIELD)) { // ignore } else if (parseContext.parseFieldMatcher().match(currentFieldName, MIN_SHOULD_MATCH_FIELD)) { if (minShouldMatch != null) { - throw new IllegalArgumentException("[" + currentFieldName + "] is not allowed in a filter context for the [" + NAME + "] query"); + throw new IllegalArgumentException("[" + currentFieldName + "] is not allowed in a filter context for the [" + TermsQueryBuilder.NAME + "] query"); } minShouldMatch = parser.textOrNull(); } else if ("boost".equals(currentFieldName)) { @@ -153,63 +92,33 @@ public class TermsQueryParser implements QueryParser { } else if ("_name".equals(currentFieldName)) { queryName = parser.text(); } else { - throw new ParsingException(parseContext, "[terms] query does not support [" + currentFieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), "[terms] query does not support [" + currentFieldName + "]"); } } } if (fieldName == null) { - throw new ParsingException(parseContext, "terms query requires a field name, followed by array of terms"); + throw new ParsingException(parser.getTokenLocation(), "terms query requires a field name, followed by array of terms or a document lookup specification"); } + return new TermsQueryBuilder(fieldName, values, minShouldMatch, disableCoord, termsLookup) + .boost(boost) + .queryName(queryName); + } - MappedFieldType fieldType = parseContext.fieldMapper(fieldName); - if (fieldType != null) { - fieldName = fieldType.names().indexName(); - } - - if (lookupId != null) { - final TermsLookup lookup = new TermsLookup(lookupIndex, lookupType, lookupId, lookupRouting, lookupPath, parseContext); - GetRequest getRequest = new GetRequest(lookup.getIndex(), lookup.getType(), lookup.getId()).preference("_local").routing(lookup.getRouting()); - getRequest.copyContextAndHeadersFrom(SearchContext.current()); - final 
GetResponse getResponse = client.get(getRequest).actionGet(); - if (getResponse.isExists()) { - List values = XContentMapValues.extractRawValues(lookup.getPath(), getResponse.getSourceAsMap()); - terms.addAll(values); + private static List parseValues(XContentParser parser) throws IOException { + List values = new ArrayList<>(); + while (parser.nextToken() != XContentParser.Token.END_ARRAY) { + Object value = parser.objectBytes(); + if (value == null) { + throw new ParsingException(parser.getTokenLocation(), "No value specified for terms query"); } + values.add(value); } + return values; + } - if (terms.isEmpty()) { - return Queries.newMatchNoDocsQuery(); - } - - Query query; - if (parseContext.isFilter()) { - if (fieldType != null) { - query = fieldType.termsQuery(terms, parseContext); - } else { - BytesRef[] filterValues = new BytesRef[terms.size()]; - for (int i = 0; i < filterValues.length; i++) { - filterValues[i] = BytesRefs.toBytesRef(terms.get(i)); - } - query = new TermsQuery(fieldName, filterValues); - } - } else { - BooleanQuery.Builder bq = new BooleanQuery.Builder(); - bq.setDisableCoord(disableCoord); - for (Object term : terms) { - if (fieldType != null) { - bq.add(fieldType.termQuery(term, parseContext), Occur.SHOULD); - } else { - bq.add(new TermQuery(new Term(fieldName, BytesRefs.toBytesRef(term))), Occur.SHOULD); - } - } - query = Queries.applyMinimumShouldMatch(bq.build(), minShouldMatch); - } - query.setBoost(boost); - - if (queryName != null) { - parseContext.addNamedQuery(queryName, query); - } - return query; + @Override + public TermsQueryBuilder getBuilderPrototype() { + return TermsQueryBuilder.PROTOTYPE; } } diff --git a/core/src/main/java/org/elasticsearch/index/query/TypeQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/TypeQueryBuilder.java index 2a9a6c500a6..94cdc243bf8 100644 --- a/core/src/main/java/org/elasticsearch/index/query/TypeQueryBuilder.java +++ 
b/core/src/main/java/org/elasticsearch/index/query/TypeQueryBuilder.java @@ -19,22 +19,89 @@ package org.elasticsearch.index.query; +import org.apache.lucene.index.Term; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.TermQuery; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.mapper.DocumentMapper; +import org.elasticsearch.index.mapper.internal.TypeFieldMapper; import java.io.IOException; +import java.util.Objects; -public class TypeQueryBuilder extends QueryBuilder { +public class TypeQueryBuilder extends AbstractQueryBuilder { - private final String type; + public static final String NAME = "type"; + + private final BytesRef type; + + static final TypeQueryBuilder PROTOTYPE = new TypeQueryBuilder("type"); public TypeQueryBuilder(String type) { + if (type == null) { + throw new IllegalArgumentException("[type] cannot be null"); + } + this.type = BytesRefs.toBytesRef(type); + } + + TypeQueryBuilder(BytesRef type) { + if (type == null) { + throw new IllegalArgumentException("[type] cannot be null"); + } this.type = type; } + public String type() { + return BytesRefs.toString(this.type); + } + @Override protected void doXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(TypeQueryParser.NAME); - builder.field("value", type); + builder.startObject(NAME); + builder.field("value", type.utf8ToString()); + printBoostAndQueryName(builder); builder.endObject(); } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + protected Query doToQuery(QueryShardContext context) throws IOException { + Query filter; + //LUCENE 4 UPGRADE document mapper should use bytesref as well? 
+ DocumentMapper documentMapper = context.mapperService().documentMapper(type.utf8ToString()); + if (documentMapper == null) { + filter = new TermQuery(new Term(TypeFieldMapper.NAME, type)); + } else { + filter = documentMapper.typeFilter(); + } + return filter; + } + + @Override + protected TypeQueryBuilder doReadFrom(StreamInput in) throws IOException { + return new TypeQueryBuilder(in.readBytesRef()); + } + + @Override + protected void doWriteTo(StreamOutput out) throws IOException { + out.writeBytesRef(type); + } + + @Override + protected int doHashCode() { + return Objects.hash(type); + } + + @Override + protected boolean doEquals(TypeQueryBuilder other) { + return Objects.equals(type, other.type); + } } \ No newline at end of file diff --git a/core/src/main/java/org/elasticsearch/index/query/TypeQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/TypeQueryParser.java index cf834be2bf7..e2b4e13c65e 100644 --- a/core/src/main/java/org/elasticsearch/index/query/TypeQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/TypeQueryParser.java @@ -19,59 +19,58 @@ package org.elasticsearch.index.query; -import org.apache.lucene.index.Term; -import org.apache.lucene.search.Query; -import org.apache.lucene.search.TermQuery; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.ParsingException; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.internal.TypeFieldMapper; import java.io.IOException; -public class TypeQueryParser implements QueryParser { - - public static final String NAME = "type"; - - @Inject - public TypeQueryParser() { - } +/** + * Parser for type query + */ +public class TypeQueryParser implements QueryParser { @Override public String[] names() { - return new String[]{NAME}; + return new String[]{TypeQueryBuilder.NAME}; } @Override - public Query 
parse(QueryParseContext parseContext) throws IOException, ParsingException { + public TypeQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException { XContentParser parser = parseContext.parser(); + BytesRef type = null; - XContentParser.Token token = parser.nextToken(); - if (token != XContentParser.Token.FIELD_NAME) { - throw new ParsingException(parseContext, "[type] filter should have a value field, and the type name"); - } - String fieldName = parser.currentName(); - if (!fieldName.equals("value")) { - throw new ParsingException(parseContext, "[type] filter should have a value field, and the type name"); - } - token = parser.nextToken(); - if (token != XContentParser.Token.VALUE_STRING) { - throw new ParsingException(parseContext, "[type] filter should have a value field, and the type name"); - } - BytesRef type = parser.utf8Bytes(); - // move to the next token - parser.nextToken(); + String queryName = null; + float boost = AbstractQueryBuilder.DEFAULT_BOOST; - Query filter; - //LUCENE 4 UPGRADE document mapper should use bytesref as well? 
- DocumentMapper documentMapper = parseContext.mapperService().documentMapper(type.utf8ToString()); - if (documentMapper == null) { - filter = new TermQuery(new Term(TypeFieldMapper.NAME, type)); - } else { - filter = documentMapper.typeFilter(); + String currentFieldName = null; + XContentParser.Token token; + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + currentFieldName = parser.currentName(); + } else if (token.isValue()) { + if ("_name".equals(currentFieldName)) { + queryName = parser.text(); + } else if ("boost".equals(currentFieldName)) { + boost = parser.floatValue(); + } else if ("value".equals(currentFieldName)) { + type = parser.utf8Bytes(); + } + } else { + throw new ParsingException(parser.getTokenLocation(), "[type] filter doesn't support [" + currentFieldName + "]"); + } } - return filter; + + if (type == null) { + throw new ParsingException(parser.getTokenLocation(), "[type] filter needs to be provided with a value for the type"); + } + return new TypeQueryBuilder(type) + .boost(boost) + .queryName(queryName); + } + + @Override + public TypeQueryBuilder getBuilderPrototype() { + return TypeQueryBuilder.PROTOTYPE; } } \ No newline at end of file diff --git a/core/src/main/java/org/elasticsearch/index/query/WildcardQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/WildcardQueryBuilder.java index 654f14ee509..44775926400 100644 --- a/core/src/main/java/org/elasticsearch/index/query/WildcardQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/WildcardQueryBuilder.java @@ -19,9 +19,20 @@ package org.elasticsearch.index.query; +import org.apache.lucene.index.Term; +import org.apache.lucene.search.MultiTermQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.WildcardQuery; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; 
+import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.query.support.QueryParsers; import java.io.IOException; +import java.util.Objects; /** * Implements the wildcard search query. Supported wildcards are *, which @@ -31,17 +42,17 @@ import java.io.IOException; * a Wildcard term should not start with one of the wildcards * or * ?. */ -public class WildcardQueryBuilder extends MultiTermQueryBuilder implements BoostableQueryBuilder { +public class WildcardQueryBuilder extends AbstractQueryBuilder implements MultiTermQueryBuilder { - private final String name; + public static final String NAME = "wildcard"; - private final String wildcard; + private final String fieldName; - private float boost = -1; + private final String value; private String rewrite; - private String queryName; + static final WildcardQueryBuilder PROTOTYPE = new WildcardQueryBuilder("field", "value"); /** * Implements the wildcard search query. Supported wildcards are *, which @@ -51,12 +62,26 @@ public class WildcardQueryBuilder extends MultiTermQueryBuilder implements Boost * a Wildcard term should not start with one of the wildcards * or * ?. 
* - * @param name The field name - * @param wildcard The wildcard query string + * @param fieldName The field name + * @param value The wildcard query string */ - public WildcardQueryBuilder(String name, String wildcard) { - this.name = name; - this.wildcard = wildcard; + public WildcardQueryBuilder(String fieldName, String value) { + if (Strings.isEmpty(fieldName)) { + throw new IllegalArgumentException("field name is null or empty"); + } + if (value == null) { + throw new IllegalArgumentException("value cannot be null."); + } + this.fieldName = fieldName; + this.value = value; + } + + public String fieldName() { + return fieldName; + } + + public String value() { + return value; } public WildcardQueryBuilder rewrite(String rewrite) { @@ -64,43 +89,71 @@ public class WildcardQueryBuilder extends MultiTermQueryBuilder implements Boost return this; } - /** - * Sets the boost for this query. Documents matching this query will (in addition to the normal - * weightings) have their score multiplied by the boost provided. - */ - @Override - public WildcardQueryBuilder boost(float boost) { - this.boost = boost; - return this; + public String rewrite() { + return this.rewrite; } - /** - * Sets the query name for the filter that can be used when searching for matched_filters per hit. 
- */ - public WildcardQueryBuilder queryName(String queryName) { - this.queryName = queryName; - return this; + @Override + public String getWriteableName() { + return NAME; } @Override public void doXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(WildcardQueryParser.NAME); - if (boost == -1 && rewrite == null && queryName == null) { - builder.field(name, wildcard); - } else { - builder.startObject(name); - builder.field("wildcard", wildcard); - if (boost != -1) { - builder.field("boost", boost); - } - if (rewrite != null) { - builder.field("rewrite", rewrite); - } - if (queryName != null) { - builder.field("_name", queryName); - } - builder.endObject(); + builder.startObject(NAME); + builder.startObject(fieldName); + builder.field("wildcard", value); + if (rewrite != null) { + builder.field("rewrite", rewrite); } + printBoostAndQueryName(builder); + builder.endObject(); builder.endObject(); } -} \ No newline at end of file + + @Override + protected Query doToQuery(QueryShardContext context) throws IOException { + String indexFieldName; + BytesRef valueBytes; + + MappedFieldType fieldType = context.fieldMapper(fieldName); + if (fieldType != null) { + indexFieldName = fieldType.names().indexName(); + valueBytes = fieldType.indexedValueForSearch(value); + } else { + indexFieldName = fieldName; + valueBytes = new BytesRef(value); + } + + WildcardQuery query = new WildcardQuery(new Term(indexFieldName, valueBytes)); + MultiTermQuery.RewriteMethod rewriteMethod = QueryParsers.parseRewriteMethod(context.parseFieldMatcher(), rewrite, null); + QueryParsers.setRewriteMethod(query, rewriteMethod); + return query; + } + + @Override + protected WildcardQueryBuilder doReadFrom(StreamInput in) throws IOException { + WildcardQueryBuilder wildcardQueryBuilder = new WildcardQueryBuilder(in.readString(), in.readString()); + wildcardQueryBuilder.rewrite = in.readOptionalString(); + return wildcardQueryBuilder; + } + + @Override + protected 
void doWriteTo(StreamOutput out) throws IOException { + out.writeString(fieldName); + out.writeString(value); + out.writeOptionalString(rewrite); + } + + @Override + protected int doHashCode() { + return Objects.hash(fieldName, value, rewrite); + } + + @Override + protected boolean doEquals(WildcardQueryBuilder other) { + return Objects.equals(fieldName, other.fieldName) && + Objects.equals(value, other.value) && + Objects.equals(rewrite, other.rewrite); + } +} diff --git a/core/src/main/java/org/elasticsearch/index/query/WildcardQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/WildcardQueryParser.java index ee1a42c81c3..4967f2e11c4 100644 --- a/core/src/main/java/org/elasticsearch/index/query/WildcardQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/WildcardQueryParser.java @@ -19,47 +19,34 @@ package org.elasticsearch.index.query; -import org.apache.lucene.index.Term; -import org.apache.lucene.search.Query; -import org.apache.lucene.search.WildcardQuery; -import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.ParsingException; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.query.support.QueryParsers; import java.io.IOException; /** - * + * Parser for wildcard query */ -public class WildcardQueryParser implements QueryParser { - - public static final String NAME = "wildcard"; - - @Inject - public WildcardQueryParser() { - } +public class WildcardQueryParser implements QueryParser { @Override public String[] names() { - return new String[]{NAME}; + return new String[]{WildcardQueryBuilder.NAME}; } @Override - public Query parse(QueryParseContext parseContext) throws IOException, ParsingException { + public WildcardQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException { XContentParser parser = parseContext.parser(); XContentParser.Token token 
= parser.nextToken(); if (token != XContentParser.Token.FIELD_NAME) { - throw new ParsingException(parseContext, "[wildcard] query malformed, no field"); + throw new ParsingException(parser.getTokenLocation(), "[wildcard] query malformed, no field"); } String fieldName = parser.currentName(); - String rewriteMethod = null; + String rewrite = null; String value = null; - float boost = 1.0f; + float boost = AbstractQueryBuilder.DEFAULT_BOOST; String queryName = null; token = parser.nextToken(); if (token == XContentParser.Token.START_OBJECT) { @@ -75,11 +62,11 @@ public class WildcardQueryParser implements QueryParser { } else if ("boost".equals(currentFieldName)) { boost = parser.floatValue(); } else if ("rewrite".equals(currentFieldName)) { - rewriteMethod = parser.textOrNull(); + rewrite = parser.textOrNull(); } else if ("_name".equals(currentFieldName)) { queryName = parser.text(); } else { - throw new ParsingException(parseContext, "[wildcard] query does not support [" + currentFieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), "[wildcard] query does not support [" + currentFieldName + "]"); } } } @@ -90,24 +77,16 @@ public class WildcardQueryParser implements QueryParser { } if (value == null) { - throw new ParsingException(parseContext, "No value specified for prefix query"); + throw new ParsingException(parser.getTokenLocation(), "No value specified for prefix query"); } - - BytesRef valueBytes; - MappedFieldType fieldType = parseContext.fieldMapper(fieldName); - if (fieldType != null) { - fieldName = fieldType.names().indexName(); - valueBytes = fieldType.indexedValueForSearch(value); - } else { - valueBytes = new BytesRef(value); - } - - WildcardQuery wildcardQuery = new WildcardQuery(new Term(fieldName, valueBytes)); - QueryParsers.setRewriteMethod(wildcardQuery, parseContext.parseFieldMatcher(), rewriteMethod); - wildcardQuery.setBoost(boost); - if (queryName != null) { - parseContext.addNamedQuery(queryName, wildcardQuery); - } - 
return wildcardQuery; + return new WildcardQueryBuilder(fieldName, value) + .rewrite(rewrite) + .boost(boost) + .queryName(queryName); } -} \ No newline at end of file + + @Override + public WildcardQueryBuilder getBuilderPrototype() { + return WildcardQueryBuilder.PROTOTYPE; + } +} diff --git a/core/src/main/java/org/elasticsearch/index/query/WrapperQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/WrapperQueryBuilder.java index e7de5fd0480..ef106a4354e 100644 --- a/core/src/main/java/org/elasticsearch/index/query/WrapperQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/WrapperQueryBuilder.java @@ -19,11 +19,20 @@ package org.elasticsearch.index.query; +import org.apache.lucene.search.Query; + import java.nio.charset.StandardCharsets; + +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; +import java.util.Arrays; /** * A Query builder which allows building a query given JSON string or binary data provided as input. 
This is useful when you want @@ -39,43 +48,96 @@ import java.io.IOException; * * */ -public class WrapperQueryBuilder extends QueryBuilder { +public class WrapperQueryBuilder extends AbstractQueryBuilder { + public static final String NAME = "wrapper"; private final byte[] source; - private final int offset; - private final int length; + static final WrapperQueryBuilder PROTOTYPE = new WrapperQueryBuilder((byte[]) new byte[]{0}); + + /** + * Creates a query builder given a query provided as a bytes array + */ + public WrapperQueryBuilder(byte[] source) { + if (source == null || source.length == 0) { + throw new IllegalArgumentException("query source text cannot be null or empty"); + } + this.source = source; + } /** * Creates a query builder given a query provided as a string */ public WrapperQueryBuilder(String source) { + if (Strings.isEmpty(source)) { + throw new IllegalArgumentException("query source string cannot be null or empty"); + } this.source = source.getBytes(StandardCharsets.UTF_8); - this.offset = 0; - this.length = this.source.length; - } - - /** - * Creates a query builder given a query provided as a bytes array - */ - public WrapperQueryBuilder(byte[] source, int offset, int length) { - this.source = source; - this.offset = offset; - this.length = length; } /** * Creates a query builder given a query provided as a {@link BytesReference} */ public WrapperQueryBuilder(BytesReference source) { + if (source == null || source.length() == 0) { + throw new IllegalArgumentException("query source text cannot be null or empty"); + } this.source = source.array(); - this.offset = source.arrayOffset(); - this.length = source.length(); + } + + public byte[] source() { + return this.source; + } + + @Override + public String getName() { + return NAME; } @Override protected void doXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(WrapperQueryParser.NAME); - builder.field("query", source, offset, length); + 
builder.startObject(NAME); + builder.field("query", source); builder.endObject(); } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + protected Query doToQuery(QueryShardContext context) throws IOException { + try (XContentParser qSourceParser = XContentFactory.xContent(source).createParser(source)) { + final QueryShardContext contextCopy = new QueryShardContext(context.index(), context.indexQueryParserService()); + contextCopy.reset(qSourceParser); + QueryBuilder result = contextCopy.parseContext().parseInnerQueryBuilder(); + context.combineNamedQueries(contextCopy); + return result.toQuery(context); + } + } + + @Override + protected void setFinalBoost(Query query) { + //no-op this query doesn't support boost + } + + @Override + protected WrapperQueryBuilder doReadFrom(StreamInput in) throws IOException { + return new WrapperQueryBuilder(in.readByteArray()); + } + + @Override + protected void doWriteTo(StreamOutput out) throws IOException { + out.writeByteArray(this.source); + } + + @Override + protected int doHashCode() { + return Arrays.hashCode(source); + } + + @Override + protected boolean doEquals(WrapperQueryBuilder other) { + return Arrays.equals(source, other.source); // otherwise we compare pointers + } } diff --git a/core/src/main/java/org/elasticsearch/index/query/WrapperQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/WrapperQueryParser.java index d64d8c1a5ea..59c570e97f9 100644 --- a/core/src/main/java/org/elasticsearch/index/query/WrapperQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/WrapperQueryParser.java @@ -19,10 +19,7 @@ package org.elasticsearch.index.query; -import org.apache.lucene.search.Query; import org.elasticsearch.common.ParsingException; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; @@ -32,39 +29,37 @@ import 
java.io.IOException; */ public class WrapperQueryParser implements QueryParser { - public static final String NAME = "wrapper"; - - @Inject - public WrapperQueryParser() { - } - @Override public String[] names() { - return new String[]{NAME}; + return new String[]{WrapperQueryBuilder.NAME}; } @Override - public Query parse(QueryParseContext parseContext) throws IOException, ParsingException { + public QueryBuilder fromXContent(QueryParseContext parseContext) throws IOException { XContentParser parser = parseContext.parser(); XContentParser.Token token = parser.nextToken(); if (token != XContentParser.Token.FIELD_NAME) { - throw new ParsingException(parseContext, "[wrapper] query malformed"); + throw new ParsingException(parser.getTokenLocation(), "[wrapper] query malformed"); } String fieldName = parser.currentName(); if (!fieldName.equals("query")) { - throw new ParsingException(parseContext, "[wrapper] query malformed"); + throw new ParsingException(parser.getTokenLocation(), "[wrapper] query malformed"); } parser.nextToken(); - byte[] querySource = parser.binaryValue(); - try (XContentParser qSourceParser = XContentFactory.xContent(querySource).createParser(querySource)) { - final QueryParseContext context = new QueryParseContext(parseContext.index(), parseContext.indexQueryParserService()); - context.reset(qSourceParser); - Query result = context.parseInnerQuery(); - parser.nextToken(); - parseContext.combineNamedQueries(context); - return result; + byte[] source = parser.binaryValue(); + + parser.nextToken(); + + if (source == null) { + throw new ParsingException(parser.getTokenLocation(), "wrapper query has no [query] specified"); } + return new WrapperQueryBuilder(source); + } + + @Override + public WrapperQueryBuilder getBuilderPrototype() { + return WrapperQueryBuilder.PROTOTYPE; } } diff --git a/core/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunction.java 
b/core/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunction.java index 44e3763b854..85d755c2441 100644 --- a/core/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunction.java +++ b/core/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunction.java @@ -32,9 +32,9 @@ import org.elasticsearch.index.query.functionscore.gauss.GaussDecayFunctionParse public interface DecayFunction { - public double evaluate(double value, double scale); + double evaluate(double value, double scale); - public Explanation explainFunction(String valueString, double value, double scale); + Explanation explainFunction(String valueString, double value, double scale); /** * The final scale parameter is computed from the scale parameter given by @@ -49,6 +49,5 @@ public interface DecayFunction { * the value which decay function should take once the distance * reaches this scale * */ - public double processScale(double scale, double decay); - + double processScale(double scale, double decay); } diff --git a/core/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionBuilder.java b/core/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionBuilder.java index 96aabb2cc44..8302b874533 100644 --- a/core/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionBuilder.java @@ -19,73 +19,515 @@ package org.elasticsearch.index.query.functionscore; +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.Explanation; +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.geo.GeoDistance; +import org.elasticsearch.common.geo.GeoPoint; +import org.elasticsearch.common.geo.GeoUtils; +import org.elasticsearch.common.io.stream.StreamInput; +import 
org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.lucene.search.function.CombineFunction; +import org.elasticsearch.common.lucene.search.function.LeafScoreFunction; +import org.elasticsearch.common.lucene.search.function.ScoreFunction; +import org.elasticsearch.common.unit.DistanceUnit; +import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.fielddata.*; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.core.DateFieldMapper; +import org.elasticsearch.index.mapper.core.NumberFieldMapper; +import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper; +import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.search.MultiValueMode; import java.io.IOException; -import java.util.Locale; +import java.util.Objects; -public abstract class DecayFunctionBuilder extends ScoreFunctionBuilder { +public abstract class DecayFunctionBuilder extends ScoreFunctionBuilder { protected static final String ORIGIN = "origin"; protected static final String SCALE = "scale"; protected static final String DECAY = "decay"; protected static final String OFFSET = "offset"; - private String fieldName; - private Object origin; - private Object scale; - private double decay = -1; - private Object offset; - private MultiValueMode multiValueMode = null; + public static double DEFAULT_DECAY = 0.5; + public static MultiValueMode DEFAULT_MULTI_VALUE_MODE = MultiValueMode.MIN; - public DecayFunctionBuilder(String fieldName, Object origin, Object scale) { - this.fieldName = fieldName; - this.origin = origin; - this.scale = scale; + private final String fieldName; + //parsing of origin, scale, offset and decay depends on the field type, delayed to the data node that has the mapping for it + private final 
BytesReference functionBytes; + private MultiValueMode multiValueMode = DEFAULT_MULTI_VALUE_MODE; + + protected DecayFunctionBuilder(String fieldName, Object origin, Object scale, Object offset) { + this(fieldName, origin, scale, offset, DEFAULT_DECAY); } - public DecayFunctionBuilder setDecay(double decay) { - if (decay <= 0 || decay >= 1.0) { - throw new IllegalStateException("scale weight parameter must be in range 0..1!"); + protected DecayFunctionBuilder(String fieldName, Object origin, Object scale, Object offset, double decay) { + if (fieldName == null) { + throw new IllegalArgumentException("decay function: field name must not be null"); + } + if (scale == null) { + throw new IllegalArgumentException("decay function: scale must not be null"); + } + if (decay <= 0 || decay >= 1.0) { + throw new IllegalStateException("decay function: decay must be in range 0..1!"); + } + this.fieldName = fieldName; + try { + XContentBuilder builder = XContentFactory.jsonBuilder(); + builder.startObject(); + if (origin != null) { + builder.field(ORIGIN, origin); + } + builder.field(SCALE, scale); + if (offset != null) { + builder.field(OFFSET, offset); + } + builder.field(DECAY, decay); + builder.endObject(); + this.functionBytes = builder.bytes(); + } catch (IOException e) { + throw new IllegalArgumentException("unable to build inner function object",e); } - this.decay = decay; - return this; } - public DecayFunctionBuilder setOffset(Object offset) { - this.offset = offset; - return this; + protected DecayFunctionBuilder(String fieldName, BytesReference functionBytes) { + if (fieldName == null) { + throw new IllegalArgumentException("decay function: field name must not be null"); + } + if (functionBytes == null) { + throw new IllegalArgumentException("decay function: function must not be null"); + } + this.fieldName = fieldName; + this.functionBytes = functionBytes; + } + + public String getFieldName() { + return this.fieldName; + } + + public BytesReference 
getFunctionBytes() { + return this.functionBytes; } @Override public void doXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(getName()); - builder.startObject(fieldName); - if (origin != null) { - builder.field(ORIGIN, origin); - } - builder.field(SCALE, scale); - if (decay > 0) { - builder.field(DECAY, decay); - } - if (offset != null) { - builder.field(OFFSET, offset); - } - builder.endObject(); - if (multiValueMode != null) { - builder.field(DecayFunctionParser.MULTI_VALUE_MODE.getPreferredName(), multiValueMode.name()); - } + builder.field(fieldName); + XContentParser parser = XContentFactory.xContent(functionBytes).createParser(functionBytes); + builder.copyCurrentStructure(parser); + builder.field(DecayFunctionParser.MULTI_VALUE_MODE.getPreferredName(), multiValueMode.name()); builder.endObject(); } public ScoreFunctionBuilder setMultiValueMode(MultiValueMode multiValueMode) { + if (multiValueMode == null) { + throw new IllegalArgumentException("decay function: multi_value_mode must not be null"); + } this.multiValueMode = multiValueMode; return this; } - public ScoreFunctionBuilder setMultiValueMode(String multiValueMode) { - this.multiValueMode = MultiValueMode.fromString(multiValueMode.toUpperCase(Locale.ROOT)); - return this; + public MultiValueMode getMultiValueMode() { + return this.multiValueMode; + } + + @Override + protected DFB doReadFrom(StreamInput in) throws IOException { + DFB decayFunctionBuilder = createFunctionBuilder(in.readString(), in.readBytesReference()); + decayFunctionBuilder.setMultiValueMode(MultiValueMode.readMultiValueModeFrom(in)); + return decayFunctionBuilder; + } + + protected abstract DFB createFunctionBuilder(String fieldName, BytesReference functionBytes); + + @Override + protected void doWriteTo(StreamOutput out) throws IOException { + out.writeString(fieldName); + out.writeBytesReference(functionBytes); + multiValueMode.writeTo(out); + } + + @Override + protected boolean 
doEquals(DFB functionBuilder) { + return Objects.equals(this.fieldName, functionBuilder.getFieldName()) && + Objects.equals(this.functionBytes, functionBuilder.getFunctionBytes()) && + Objects.equals(this.multiValueMode, functionBuilder.getMultiValueMode()); + } + + @Override + protected int doHashCode() { + return Objects.hash(this.fieldName, this.functionBytes, this.multiValueMode); + } + + @Override + protected ScoreFunction doToFunction(QueryShardContext context) throws IOException { + XContentParser parser = XContentFactory.xContent(functionBytes).createParser(functionBytes); + return parseVariable(fieldName, parser, context, multiValueMode); + } + + /** + * Override this function if you want to produce your own scorer. + * */ + protected abstract DecayFunction getDecayFunction(); + + private AbstractDistanceScoreFunction parseVariable(String fieldName, XContentParser parser, QueryShardContext context, MultiValueMode mode) throws IOException { + //the field must exist, else we cannot read the value for the doc later + MappedFieldType fieldType = context.fieldMapper(fieldName); + if (fieldType == null) { + throw new ParsingException(parser.getTokenLocation(), "unknown field [{}]", fieldName); + } + + // dates and time need special handling + parser.nextToken(); + if (fieldType instanceof DateFieldMapper.DateFieldType) { + return parseDateVariable(parser, context, (DateFieldMapper.DateFieldType) fieldType, mode); + } else if (fieldType instanceof GeoPointFieldMapper.GeoPointFieldType) { + return parseGeoVariable(parser, context, (GeoPointFieldMapper.GeoPointFieldType) fieldType, mode); + } else if (fieldType instanceof NumberFieldMapper.NumberFieldType) { + return parseNumberVariable(parser, context, (NumberFieldMapper.NumberFieldType) fieldType, mode); + } else { + throw new ParsingException(parser.getTokenLocation(), "field [{}] is of type [{}], but only numeric types are supported.", fieldName, fieldType); + } + } + + private AbstractDistanceScoreFunction 
parseNumberVariable(XContentParser parser, QueryShardContext context, + NumberFieldMapper.NumberFieldType fieldType, MultiValueMode mode) throws IOException { + XContentParser.Token token; + String parameterName = null; + double scale = 0; + double origin = 0; + double decay = 0.5; + double offset = 0.0d; + boolean scaleFound = false; + boolean refFound = false; + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + parameterName = parser.currentName(); + } else if (DecayFunctionBuilder.SCALE.equals(parameterName)) { + scale = parser.doubleValue(); + scaleFound = true; + } else if (DecayFunctionBuilder.DECAY.equals(parameterName)) { + decay = parser.doubleValue(); + } else if (DecayFunctionBuilder.ORIGIN.equals(parameterName)) { + origin = parser.doubleValue(); + refFound = true; + } else if (DecayFunctionBuilder.OFFSET.equals(parameterName)) { + offset = parser.doubleValue(); + } else { + throw new ElasticsearchParseException("parameter [{}] not supported!", parameterName); + } + } + if (!scaleFound || !refFound) { + throw new ElasticsearchParseException("both [{}] and [{}] must be set for numeric fields.", DecayFunctionBuilder.SCALE, DecayFunctionBuilder.ORIGIN); + } + IndexNumericFieldData numericFieldData = context.getForField(fieldType); + return new NumericFieldDataScoreFunction(origin, scale, decay, offset, getDecayFunction(), numericFieldData, mode); + } + + private AbstractDistanceScoreFunction parseGeoVariable(XContentParser parser, QueryShardContext context, + GeoPointFieldMapper.GeoPointFieldType fieldType, MultiValueMode mode) throws IOException { + XContentParser.Token token; + String parameterName = null; + GeoPoint origin = new GeoPoint(); + String scaleString = null; + String offsetString = "0km"; + double decay = 0.5; + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + parameterName = 
parser.currentName(); + } else if (DecayFunctionBuilder.SCALE.equals(parameterName)) { + scaleString = parser.text(); + } else if (DecayFunctionBuilder.ORIGIN.equals(parameterName)) { + origin = GeoUtils.parseGeoPoint(parser); + } else if (DecayFunctionBuilder.DECAY.equals(parameterName)) { + decay = parser.doubleValue(); + } else if (DecayFunctionBuilder.OFFSET.equals(parameterName)) { + offsetString = parser.text(); + } else { + throw new ElasticsearchParseException("parameter [{}] not supported!", parameterName); + } + } + if (origin == null || scaleString == null) { + throw new ElasticsearchParseException("[{}] and [{}] must be set for geo fields.", DecayFunctionBuilder.ORIGIN, DecayFunctionBuilder.SCALE); + } + double scale = DistanceUnit.DEFAULT.parse(scaleString, DistanceUnit.DEFAULT); + double offset = DistanceUnit.DEFAULT.parse(offsetString, DistanceUnit.DEFAULT); + IndexGeoPointFieldData indexFieldData = context.getForField(fieldType); + return new GeoFieldDataScoreFunction(origin, scale, decay, offset, getDecayFunction(), indexFieldData, mode); + + } + + private AbstractDistanceScoreFunction parseDateVariable(XContentParser parser, QueryShardContext context, + DateFieldMapper.DateFieldType dateFieldType, MultiValueMode mode) throws IOException { + XContentParser.Token token; + String parameterName = null; + String scaleString = null; + String originString = null; + String offsetString = "0d"; + double decay = 0.5; + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + parameterName = parser.currentName(); + } else if (DecayFunctionBuilder.SCALE.equals(parameterName)) { + scaleString = parser.text(); + } else if (DecayFunctionBuilder.ORIGIN.equals(parameterName)) { + originString = parser.text(); + } else if (DecayFunctionBuilder.DECAY.equals(parameterName)) { + decay = parser.doubleValue(); + } else if (DecayFunctionBuilder.OFFSET.equals(parameterName)) { + offsetString = 
parser.text(); + } else { + throw new ElasticsearchParseException("parameter [{}] not supported!", parameterName); + } + } + long origin; + if (originString == null) { + origin = context.nowInMillis(); + } else { + origin = dateFieldType.parseToMilliseconds(originString, false, null, null); + } + + if (scaleString == null) { + throw new ElasticsearchParseException("[{}] must be set for date fields.", DecayFunctionBuilder.SCALE); + } + TimeValue val = TimeValue.parseTimeValue(scaleString, TimeValue.timeValueHours(24), DecayFunctionParser.class.getSimpleName() + ".scale"); + double scale = val.getMillis(); + val = TimeValue.parseTimeValue(offsetString, TimeValue.timeValueHours(24), DecayFunctionParser.class.getSimpleName() + ".offset"); + double offset = val.getMillis(); + IndexNumericFieldData numericFieldData = context.getForField(dateFieldType); + return new NumericFieldDataScoreFunction(origin, scale, decay, offset, getDecayFunction(), numericFieldData, mode); + } + + static class GeoFieldDataScoreFunction extends AbstractDistanceScoreFunction { + + private final GeoPoint origin; + private final IndexGeoPointFieldData fieldData; + + private static final GeoDistance distFunction = GeoDistance.DEFAULT; + + public GeoFieldDataScoreFunction(GeoPoint origin, double scale, double decay, double offset, DecayFunction func, + IndexGeoPointFieldData fieldData, MultiValueMode mode) { + super(scale, decay, offset, func, mode); + this.origin = origin; + this.fieldData = fieldData; + } + + @Override + public boolean needsScores() { + return false; + } + + @Override + protected NumericDoubleValues distance(LeafReaderContext context) { + final MultiGeoPointValues geoPointValues = fieldData.load(context).getGeoPointValues(); + return mode.select(new MultiValueMode.UnsortedNumericDoubleValues() { + @Override + public int count() { + return geoPointValues.count(); + } + + @Override + public void setDocument(int docId) { + geoPointValues.setDocument(docId); + } + + @Override + 
public double valueAt(int index) { + GeoPoint other = geoPointValues.valueAt(index); + return Math.max(0.0d, distFunction.calculate(origin.lat(), origin.lon(), other.lat(), other.lon(), DistanceUnit.METERS) - offset); + } + }, 0.0); + } + + @Override + protected String getDistanceString(LeafReaderContext ctx, int docId) { + StringBuilder values = new StringBuilder(mode.name()); + values.append(" of: ["); + final MultiGeoPointValues geoPointValues = fieldData.load(ctx).getGeoPointValues(); + geoPointValues.setDocument(docId); + final int num = geoPointValues.count(); + if (num > 0) { + for (int i = 0; i < num; i++) { + GeoPoint value = geoPointValues.valueAt(i); + values.append("Math.max(arcDistance("); + values.append(value).append("(=doc value),").append(origin).append("(=origin)) - ").append(offset).append("(=offset), 0)"); + if (i != num - 1) { + values.append(", "); + } + } + } else { + values.append("0.0"); + } + values.append("]"); + return values.toString(); + } + + @Override + protected String getFieldName() { + return fieldData.getFieldNames().fullName(); + } + + @Override + protected boolean doEquals(ScoreFunction other) { + GeoFieldDataScoreFunction geoFieldDataScoreFunction = (GeoFieldDataScoreFunction) other; + return super.doEquals(other) && + Objects.equals(this.origin, geoFieldDataScoreFunction.origin); + } + } + + static class NumericFieldDataScoreFunction extends AbstractDistanceScoreFunction { + + private final IndexNumericFieldData fieldData; + private final double origin; + + public NumericFieldDataScoreFunction(double origin, double scale, double decay, double offset, DecayFunction func, + IndexNumericFieldData fieldData, MultiValueMode mode) { + super(scale, decay, offset, func, mode); + this.fieldData = fieldData; + this.origin = origin; + } + + @Override + public boolean needsScores() { + return false; + } + + @Override + protected NumericDoubleValues distance(LeafReaderContext context) { + final SortedNumericDoubleValues doubleValues = 
fieldData.load(context).getDoubleValues(); + return mode.select(new MultiValueMode.UnsortedNumericDoubleValues() { + @Override + public int count() { + return doubleValues.count(); + } + + @Override + public void setDocument(int docId) { + doubleValues.setDocument(docId); + } + + @Override + public double valueAt(int index) { + return Math.max(0.0d, Math.abs(doubleValues.valueAt(index) - origin) - offset); + } + }, 0.0); + } + + @Override + protected String getDistanceString(LeafReaderContext ctx, int docId) { + + StringBuilder values = new StringBuilder(mode.name()); + values.append("["); + final SortedNumericDoubleValues doubleValues = fieldData.load(ctx).getDoubleValues(); + doubleValues.setDocument(docId); + final int num = doubleValues.count(); + if (num > 0) { + for (int i = 0; i < num; i++) { + double value = doubleValues.valueAt(i); + values.append("Math.max(Math.abs("); + values.append(value).append("(=doc value) - ").append(origin).append("(=origin))) - ").append(offset).append("(=offset), 0)"); + if (i != num - 1) { + values.append(", "); + } + } + } else { + values.append("0.0"); + } + values.append("]"); + return values.toString(); + + } + + @Override + protected String getFieldName() { + return fieldData.getFieldNames().fullName(); + } + + @Override + protected boolean doEquals(ScoreFunction other) { + NumericFieldDataScoreFunction numericFieldDataScoreFunction = (NumericFieldDataScoreFunction) other; + if (super.doEquals(other) == false) { + return false; + } + return Objects.equals(this.origin, numericFieldDataScoreFunction.origin); + } + } + + /** + * This is the base class for scoring a single field. 
+ * + * */ + public static abstract class AbstractDistanceScoreFunction extends ScoreFunction { + + private final double scale; + protected final double offset; + private final DecayFunction func; + protected final MultiValueMode mode; + + public AbstractDistanceScoreFunction(double userSuppiedScale, double decay, double offset, DecayFunction func, MultiValueMode mode) { + super(CombineFunction.MULTIPLY); + this.mode = mode; + if (userSuppiedScale <= 0.0) { + throw new IllegalArgumentException(FunctionScoreQueryBuilder.NAME + " : scale must be > 0.0."); + } + if (decay <= 0.0 || decay >= 1.0) { + throw new IllegalArgumentException(FunctionScoreQueryBuilder.NAME + + " : decay must be in the range [0..1]."); + } + this.scale = func.processScale(userSuppiedScale, decay); + this.func = func; + if (offset < 0.0d) { + throw new IllegalArgumentException(FunctionScoreQueryBuilder.NAME + " : offset must be > 0.0"); + } + this.offset = offset; + } + + /** + * This function computes the distance from a defined origin. Since + * the value of the document is read from the index, it cannot be + * guaranteed that the value actually exists. If it does not, we assume + * the user handles this case in the query and return 0. 
+ * */ + protected abstract NumericDoubleValues distance(LeafReaderContext context); + + @Override + public final LeafScoreFunction getLeafScoreFunction(final LeafReaderContext ctx) { + final NumericDoubleValues distance = distance(ctx); + return new LeafScoreFunction() { + + @Override + public double score(int docId, float subQueryScore) { + return func.evaluate(distance.get(docId), scale); + } + + @Override + public Explanation explainScore(int docId, Explanation subQueryScore) throws IOException { + return Explanation.match( + CombineFunction.toFloat(score(docId, subQueryScore.getValue())), + "Function for field " + getFieldName() + ":", + func.explainFunction(getDistanceString(ctx, docId), distance.get(docId), scale)); + } + }; + } + + protected abstract String getDistanceString(LeafReaderContext ctx, int docId); + + protected abstract String getFieldName(); + + @Override + protected boolean doEquals(ScoreFunction other) { + AbstractDistanceScoreFunction distanceScoreFunction = (AbstractDistanceScoreFunction) other; + return Objects.equals(this.scale, distanceScoreFunction.scale) && + Objects.equals(this.offset, distanceScoreFunction.offset) && + Objects.equals(this.mode, distanceScoreFunction.mode) && + Objects.equals(this.func, distanceScoreFunction.func) && + Objects.equals(this.getFieldName(), distanceScoreFunction.getFieldName()); + } } } diff --git a/core/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionParser.java b/core/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionParser.java index 6e74959ab3f..1b4dbaea3e2 100644 --- a/core/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionParser.java @@ -19,39 +19,20 @@ package org.elasticsearch.index.query.functionscore; -import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.search.Explanation; -import 
org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.geo.GeoDistance; -import org.elasticsearch.common.geo.GeoPoint; -import org.elasticsearch.common.geo.GeoUtils; -import org.elasticsearch.common.lucene.search.function.CombineFunction; -import org.elasticsearch.common.lucene.search.function.LeafScoreFunction; -import org.elasticsearch.common.lucene.search.function.ScoreFunction; -import org.elasticsearch.common.unit.DistanceUnit; -import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.fielddata.IndexGeoPointFieldData; -import org.elasticsearch.index.fielddata.IndexNumericFieldData; -import org.elasticsearch.index.fielddata.MultiGeoPointValues; -import org.elasticsearch.index.fielddata.NumericDoubleValues; -import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; -import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.mapper.core.DateFieldMapper; -import org.elasticsearch.index.mapper.core.NumberFieldMapper; -import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper; -import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.common.ParsingException; +import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.query.functionscore.gauss.GaussDecayFunctionBuilder; import org.elasticsearch.index.query.functionscore.gauss.GaussDecayFunctionParser; import org.elasticsearch.search.MultiValueMode; -import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; -import java.util.Locale; /** * This class provides the basic 
functionality needed for adding a decay @@ -65,8 +46,9 @@ import java.util.Locale; * "fieldname1" : { * "origin" = "someValue", * "scale" = "someValue" - * } - * + * }, + * "multi_value_mode" : "min" + * } * * * @@ -84,24 +66,25 @@ import java.util.Locale; * parameters origin and scale. *

* To write a new scoring function, create a new class that inherits from this - * one and implement the getDistanceFunction(). Furthermore, to create a builder, - * override the getName() in {@link DecayFunctionBuilder}. + * one and implements {@link #getBuilderPrototype()} and {@link #getNames()}. + * Also create its corresponding {@link DecayFunctionBuilder}. The latter needs to + * implement {@link DecayFunctionBuilder#doReadFrom(StreamInput)} and + * {@link DecayFunctionBuilder#doWriteTo(StreamOutput)} for serialization purposes, + * {@link DecayFunctionBuilder#doEquals(DecayFunctionBuilder)} and + * {@link DecayFunctionBuilder#doHashCode()} for equality checks, + * {@link DecayFunctionBuilder#getName()} that returns the name of the function and + * {@link DecayFunctionBuilder#getDecayFunction()} which returns the corresponding lucene function. *

* See {@link GaussDecayFunctionBuilder} and {@link GaussDecayFunctionParser} * for an example. The parser furthermore needs to be registered in the * {@link org.elasticsearch.search.SearchModule SearchModule}. * - * **/ + */ -public abstract class DecayFunctionParser implements ScoreFunctionParser { +public abstract class DecayFunctionParser> implements ScoreFunctionParser { public static final ParseField MULTI_VALUE_MODE = new ParseField("multi_value_mode"); - /** - * Override this function if you want to produce your own scorer. - * */ - public abstract DecayFunction getDecayFunction(); - /** * Parses bodies of the kind * @@ -109,371 +92,40 @@ public abstract class DecayFunctionParser implements ScoreFunctionParser { * * { * "fieldname1" : { - * "origin" = "someValue", - * "scale" = "someValue" - * } - * + * "origin" : "someValue", + * "scale" : "someValue" + * }, + * "multi_value_mode" : "min" * } * * - * - * */ + */ @Override - public ScoreFunction parse(QueryParseContext parseContext, XContentParser parser) throws IOException, ParsingException { + public DFB fromXContent(QueryParseContext context, XContentParser parser) throws IOException, ParsingException { String currentFieldName; XContentParser.Token token; - AbstractDistanceScoreFunction scoreFunction; - String multiValueMode = "MIN"; - XContentBuilder variableContent = XContentFactory.jsonBuilder(); + MultiValueMode multiValueMode = DecayFunctionBuilder.DEFAULT_MULTI_VALUE_MODE; String fieldName = null; + BytesReference functionBytes = null; while ((token = parser.nextToken()) == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); token = parser.nextToken(); if (token == XContentParser.Token.START_OBJECT) { - variableContent.copyCurrentStructure(parser); fieldName = currentFieldName; - } else if (parseContext.parseFieldMatcher().match(currentFieldName, MULTI_VALUE_MODE)) { - multiValueMode = parser.text(); + XContentBuilder builder = XContentFactory.jsonBuilder(); + 
builder.copyCurrentStructure(parser); + functionBytes = builder.bytes(); + } else if (context.parseFieldMatcher().match(currentFieldName, MULTI_VALUE_MODE)) { + multiValueMode = MultiValueMode.fromString(parser.text()); } else { - throw new ElasticsearchParseException("malformed score function score parameters."); + throw new ParsingException(parser.getTokenLocation(), "malformed score function score parameters."); } } - if (fieldName == null) { - throw new ElasticsearchParseException("malformed score function score parameters."); + if (fieldName == null || functionBytes == null) { + throw new ParsingException(parser.getTokenLocation(), "malformed score function score parameters."); } - XContentParser variableParser = XContentFactory.xContent(variableContent.string()).createParser(variableContent.string()); - scoreFunction = parseVariable(fieldName, variableParser, parseContext, MultiValueMode.fromString(multiValueMode.toUpperCase(Locale.ROOT))); - return scoreFunction; + DFB functionBuilder = getBuilderPrototype().createFunctionBuilder(fieldName, functionBytes); + functionBuilder.setMultiValueMode(multiValueMode); + return functionBuilder; } - - // parses origin and scale parameter for field "fieldName" - private AbstractDistanceScoreFunction parseVariable(String fieldName, XContentParser parser, QueryParseContext parseContext, MultiValueMode mode) throws IOException { - - // now, the field must exist, else we cannot read the value for - // the doc later - MappedFieldType fieldType = parseContext.fieldMapper(fieldName); - if (fieldType == null) { - throw new ParsingException(parseContext, "unknown field [{}]", fieldName); - } - - // dates and time need special handling - parser.nextToken(); - if (fieldType instanceof DateFieldMapper.DateFieldType) { - return parseDateVariable(fieldName, parser, parseContext, (DateFieldMapper.DateFieldType) fieldType, mode); - } else if (fieldType instanceof GeoPointFieldMapper.GeoPointFieldType) { - return 
parseGeoVariable(fieldName, parser, parseContext, (GeoPointFieldMapper.GeoPointFieldType) fieldType, mode); - } else if (fieldType instanceof NumberFieldMapper.NumberFieldType) { - return parseNumberVariable(fieldName, parser, parseContext, (NumberFieldMapper.NumberFieldType) fieldType, mode); - } else { - throw new ParsingException(parseContext, "field [{}] is of type [{}], but only numeric types are supported.", fieldName, fieldType); - } - } - - private AbstractDistanceScoreFunction parseNumberVariable(String fieldName, XContentParser parser, QueryParseContext parseContext, - NumberFieldMapper.NumberFieldType fieldType, MultiValueMode mode) throws IOException { - XContentParser.Token token; - String parameterName = null; - double scale = 0; - double origin = 0; - double decay = 0.5; - double offset = 0.0d; - boolean scaleFound = false; - boolean refFound = false; - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - parameterName = parser.currentName(); - } else if (parameterName.equals(DecayFunctionBuilder.SCALE)) { - scale = parser.doubleValue(); - scaleFound = true; - } else if (parameterName.equals(DecayFunctionBuilder.DECAY)) { - decay = parser.doubleValue(); - } else if (parameterName.equals(DecayFunctionBuilder.ORIGIN)) { - origin = parser.doubleValue(); - refFound = true; - } else if (parameterName.equals(DecayFunctionBuilder.OFFSET)) { - offset = parser.doubleValue(); - } else { - throw new ElasticsearchParseException("parameter [{}] not supported!", parameterName); - } - } - if (!scaleFound || !refFound) { - throw new ElasticsearchParseException("both [{}] and [{}] must be set for numeric fields.", DecayFunctionBuilder.SCALE, DecayFunctionBuilder.ORIGIN); - } - IndexNumericFieldData numericFieldData = parseContext.getForField(fieldType); - return new NumericFieldDataScoreFunction(origin, scale, decay, offset, getDecayFunction(), numericFieldData, mode); - } - - private 
AbstractDistanceScoreFunction parseGeoVariable(String fieldName, XContentParser parser, QueryParseContext parseContext, - GeoPointFieldMapper.GeoPointFieldType fieldType, MultiValueMode mode) throws IOException { - XContentParser.Token token; - String parameterName = null; - GeoPoint origin = new GeoPoint(); - String scaleString = null; - String offsetString = "0km"; - double decay = 0.5; - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - parameterName = parser.currentName(); - } else if (parameterName.equals(DecayFunctionBuilder.SCALE)) { - scaleString = parser.text(); - } else if (parameterName.equals(DecayFunctionBuilder.ORIGIN)) { - origin = GeoUtils.parseGeoPoint(parser); - } else if (parameterName.equals(DecayFunctionBuilder.DECAY)) { - decay = parser.doubleValue(); - } else if (parameterName.equals(DecayFunctionBuilder.OFFSET)) { - offsetString = parser.text(); - } else { - throw new ElasticsearchParseException("parameter [{}] not supported!", parameterName); - } - } - if (origin == null || scaleString == null) { - throw new ElasticsearchParseException("[{}] and [{}] must be set for geo fields.", DecayFunctionBuilder.ORIGIN, DecayFunctionBuilder.SCALE); - } - double scale = DistanceUnit.DEFAULT.parse(scaleString, DistanceUnit.DEFAULT); - double offset = DistanceUnit.DEFAULT.parse(offsetString, DistanceUnit.DEFAULT); - IndexGeoPointFieldData indexFieldData = parseContext.getForField(fieldType); - return new GeoFieldDataScoreFunction(origin, scale, decay, offset, getDecayFunction(), indexFieldData, mode); - - } - - private AbstractDistanceScoreFunction parseDateVariable(String fieldName, XContentParser parser, QueryParseContext parseContext, - DateFieldMapper.DateFieldType dateFieldType, MultiValueMode mode) throws IOException { - XContentParser.Token token; - String parameterName = null; - String scaleString = null; - String originString = null; - String offsetString = "0d"; - double 
decay = 0.5; - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - parameterName = parser.currentName(); - } else if (parameterName.equals(DecayFunctionBuilder.SCALE)) { - scaleString = parser.text(); - } else if (parameterName.equals(DecayFunctionBuilder.ORIGIN)) { - originString = parser.text(); - } else if (parameterName.equals(DecayFunctionBuilder.DECAY)) { - decay = parser.doubleValue(); - } else if (parameterName.equals(DecayFunctionBuilder.OFFSET)) { - offsetString = parser.text(); - } else { - throw new ElasticsearchParseException("parameter [{}] not supported!", parameterName); - } - } - long origin = SearchContext.current().nowInMillis(); - if (originString != null) { - origin = dateFieldType.parseToMilliseconds(originString, false, null, null); - } - - if (scaleString == null) { - throw new ElasticsearchParseException("[{}] must be set for date fields.", DecayFunctionBuilder.SCALE); - } - TimeValue val = TimeValue.parseTimeValue(scaleString, TimeValue.timeValueHours(24), getClass().getSimpleName() + ".scale"); - double scale = val.getMillis(); - val = TimeValue.parseTimeValue(offsetString, TimeValue.timeValueHours(24), getClass().getSimpleName() + ".offset"); - double offset = val.getMillis(); - IndexNumericFieldData numericFieldData = parseContext.getForField(dateFieldType); - return new NumericFieldDataScoreFunction(origin, scale, decay, offset, getDecayFunction(), numericFieldData, mode); - } - - static class GeoFieldDataScoreFunction extends AbstractDistanceScoreFunction { - - private final GeoPoint origin; - private final IndexGeoPointFieldData fieldData; - - private static final GeoDistance distFunction = GeoDistance.DEFAULT; - - public GeoFieldDataScoreFunction(GeoPoint origin, double scale, double decay, double offset, DecayFunction func, - IndexGeoPointFieldData fieldData, MultiValueMode mode) { - super(scale, decay, offset, func, mode); - this.origin = origin; - 
this.fieldData = fieldData; - } - - @Override - public boolean needsScores() { - return false; - } - - @Override - protected NumericDoubleValues distance(LeafReaderContext context) { - final MultiGeoPointValues geoPointValues = fieldData.load(context).getGeoPointValues(); - return mode.select(new MultiValueMode.UnsortedNumericDoubleValues() { - @Override - public int count() { - return geoPointValues.count(); - } - - @Override - public void setDocument(int docId) { - geoPointValues.setDocument(docId); - } - - @Override - public double valueAt(int index) { - GeoPoint other = geoPointValues.valueAt(index); - return Math.max(0.0d, distFunction.calculate(origin.lat(), origin.lon(), other.lat(), other.lon(), DistanceUnit.METERS) - offset); - } - }, 0.0); - } - - @Override - protected String getDistanceString(LeafReaderContext ctx, int docId) { - StringBuilder values = new StringBuilder(mode.name()); - values.append(" of: ["); - final MultiGeoPointValues geoPointValues = fieldData.load(ctx).getGeoPointValues(); - geoPointValues.setDocument(docId); - final int num = geoPointValues.count(); - if (num > 0) { - for (int i = 0; i < num; i++) { - GeoPoint value = geoPointValues.valueAt(i); - values.append("Math.max(arcDistance("); - values.append(value).append("(=doc value),").append(origin).append("(=origin)) - ").append(offset).append("(=offset), 0)"); - if (i != num - 1) { - values.append(", "); - } - } - } else { - values.append("0.0"); - } - values.append("]"); - return values.toString(); - } - - @Override - protected String getFieldName() { - return fieldData.getFieldNames().fullName(); - } - } - - static class NumericFieldDataScoreFunction extends AbstractDistanceScoreFunction { - - private final IndexNumericFieldData fieldData; - private final double origin; - - public NumericFieldDataScoreFunction(double origin, double scale, double decay, double offset, DecayFunction func, - IndexNumericFieldData fieldData, MultiValueMode mode) { - super(scale, decay, offset, func, 
mode); - this.fieldData = fieldData; - this.origin = origin; - } - - @Override - public boolean needsScores() { - return false; - } - - @Override - protected NumericDoubleValues distance(LeafReaderContext context) { - final SortedNumericDoubleValues doubleValues = fieldData.load(context).getDoubleValues(); - return mode.select(new MultiValueMode.UnsortedNumericDoubleValues() { - @Override - public int count() { - return doubleValues.count(); - } - - @Override - public void setDocument(int docId) { - doubleValues.setDocument(docId); - } - - @Override - public double valueAt(int index) { - return Math.max(0.0d, Math.abs(doubleValues.valueAt(index) - origin) - offset); - } - }, 0.0); - } - - @Override - protected String getDistanceString(LeafReaderContext ctx, int docId) { - - StringBuilder values = new StringBuilder(mode.name()); - values.append("["); - final SortedNumericDoubleValues doubleValues = fieldData.load(ctx).getDoubleValues(); - doubleValues.setDocument(docId); - final int num = doubleValues.count(); - if (num > 0) { - for (int i = 0; i < num; i++) { - double value = doubleValues.valueAt(i); - values.append("Math.max(Math.abs("); - values.append(value).append("(=doc value) - ").append(origin).append("(=origin))) - ").append(offset).append("(=offset), 0)"); - if (i != num - 1) { - values.append(", "); - } - } - } else { - values.append("0.0"); - } - values.append("]"); - return values.toString(); - - } - - @Override - protected String getFieldName() { - return fieldData.getFieldNames().fullName(); - } - } - - /** - * This is the base class for scoring a single field. 
- * - * */ - public static abstract class AbstractDistanceScoreFunction extends ScoreFunction { - - private final double scale; - protected final double offset; - private final DecayFunction func; - protected final MultiValueMode mode; - - public AbstractDistanceScoreFunction(double userSuppiedScale, double decay, double offset, DecayFunction func, MultiValueMode mode) { - super(CombineFunction.MULT); - this.mode = mode; - if (userSuppiedScale <= 0.0) { - throw new IllegalArgumentException(FunctionScoreQueryParser.NAME + " : scale must be > 0.0."); - } - if (decay <= 0.0 || decay >= 1.0) { - throw new IllegalArgumentException(FunctionScoreQueryParser.NAME - + " : decay must be in the range [0..1]."); - } - this.scale = func.processScale(userSuppiedScale, decay); - this.func = func; - if (offset < 0.0d) { - throw new IllegalArgumentException(FunctionScoreQueryParser.NAME + " : offset must be > 0.0"); - } - this.offset = offset; - } - - /** - * This function computes the distance from a defined origin. Since - * the value of the document is read from the index, it cannot be - * guaranteed that the value actually exists. If it does not, we assume - * the user handles this case in the query and return 0. 
- * */ - protected abstract NumericDoubleValues distance(LeafReaderContext context); - - @Override - public final LeafScoreFunction getLeafScoreFunction(final LeafReaderContext ctx) { - final NumericDoubleValues distance = distance(ctx); - return new LeafScoreFunction() { - - @Override - public double score(int docId, float subQueryScore) { - return func.evaluate(distance.get(docId), scale); - } - - @Override - public Explanation explainScore(int docId, Explanation subQueryScore) throws IOException { - return Explanation.match( - CombineFunction.toFloat(score(docId, subQueryScore.getValue())), - "Function for field " + getFieldName() + ":", - func.explainFunction(getDistanceString(ctx, docId), distance.get(docId), scale)); - } - }; - } - - protected abstract String getDistanceString(LeafReaderContext ctx, int docId); - - protected abstract String getFieldName(); - } - } diff --git a/core/src/main/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilder.java index dec90b130ca..d5c260f9616 100644 --- a/core/src/main/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilder.java @@ -19,119 +19,163 @@ package org.elasticsearch.index.query.functionscore; +import org.apache.lucene.search.MatchAllDocsQuery; +import org.apache.lucene.search.Query; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.lucene.search.function.CombineFunction; +import org.elasticsearch.common.lucene.search.function.FiltersFunctionScoreQuery; +import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery; +import org.elasticsearch.common.lucene.search.function.ScoreFunction; +import 
org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.index.query.BoostableQueryBuilder; -import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.*; +import org.elasticsearch.index.query.functionscore.random.RandomScoreFunctionBuilder; import java.io.IOException; -import java.util.ArrayList; +import java.util.Arrays; +import java.util.Locale; +import java.util.Objects; /** * A query that uses a filters with a script associated with them to compute the * score. */ -public class FunctionScoreQueryBuilder extends QueryBuilder implements BoostableQueryBuilder { +public class FunctionScoreQueryBuilder extends AbstractQueryBuilder { - private final QueryBuilder queryBuilder; + public static final String NAME = "function_score"; - private Float boost; + public static final CombineFunction DEFAULT_BOOST_MODE = CombineFunction.MULTIPLY; + public static final FiltersFunctionScoreQuery.ScoreMode DEFAULT_SCORE_MODE = FiltersFunctionScoreQuery.ScoreMode.MULTIPLY; - private Float maxBoost; + private final QueryBuilder query; - private String scoreMode; + private float maxBoost = FunctionScoreQuery.DEFAULT_MAX_BOOST; - private String boostMode; + private FiltersFunctionScoreQuery.ScoreMode scoreMode = DEFAULT_SCORE_MODE; + + private CombineFunction boostMode; - private ArrayList filters = new ArrayList<>(); - private ArrayList scoreFunctions = new ArrayList<>(); private Float minScore = null; + private final FilterFunctionBuilder[] filterFunctionBuilders; + /** - * Creates a function_score query that executes on documents that match query a query. - * Query and filter will be wrapped into a filtered_query. + * Creates a function_score query without functions * - * @param queryBuilder the query that defines which documents the function_score query will be executed on. 
+ * @param query the query that needs to be custom scored */ - public FunctionScoreQueryBuilder(QueryBuilder queryBuilder) { - this.queryBuilder = queryBuilder; - } - - public FunctionScoreQueryBuilder() { - this.queryBuilder = null; + public FunctionScoreQueryBuilder(QueryBuilder query) { + this(query, new FilterFunctionBuilder[0]); } /** - * Creates a function_score query that will execute the function scoreFunctionBuilder on all documents. + * Creates a function_score query that executes the provided filters and functions on all documents + * + * @param filterFunctionBuilders the filters and functions + */ + public FunctionScoreQueryBuilder(FilterFunctionBuilder[] filterFunctionBuilders) { + this(new MatchAllQueryBuilder(), filterFunctionBuilders); + } + + /** + * Creates a function_score query that will execute the function provided on all documents * * @param scoreFunctionBuilder score function that is executed */ public FunctionScoreQueryBuilder(ScoreFunctionBuilder scoreFunctionBuilder) { - if (scoreFunctionBuilder == null) { - throw new IllegalArgumentException("function_score: function must not be null"); - } - queryBuilder = null; - this.filters.add(null); - this.scoreFunctions.add(scoreFunctionBuilder); + this(new MatchAllQueryBuilder(), new FilterFunctionBuilder[]{new FilterFunctionBuilder(scoreFunctionBuilder)}); } /** - * Adds a score function that will will execute the function scoreFunctionBuilder on all documents matching the filter. + * Creates a function_score query that will execute the function provided in the context of the provided query * - * @param filter the filter that defines which documents the function_score query will be executed on. 
+ * @param query the query to custom score * @param scoreFunctionBuilder score function that is executed */ - public FunctionScoreQueryBuilder add(QueryBuilder filter, ScoreFunctionBuilder scoreFunctionBuilder) { - if (scoreFunctionBuilder == null) { - throw new IllegalArgumentException("function_score: function must not be null"); - } - this.filters.add(filter); - this.scoreFunctions.add(scoreFunctionBuilder); - return this; + public FunctionScoreQueryBuilder(QueryBuilder query, ScoreFunctionBuilder scoreFunctionBuilder) { + this(query, new FilterFunctionBuilder[]{new FilterFunctionBuilder(scoreFunctionBuilder)}); } /** - * Adds a score function that will will execute the function scoreFunctionBuilder on all documents. + * Creates a function_score query that executes the provided filters and functions on documents that match a query. * - * @param scoreFunctionBuilder score function that is executed + * @param query the query that defines which documents the function_score query will be executed on. 
+ * @param filterFunctionBuilders the filters and functions */ - public FunctionScoreQueryBuilder add(ScoreFunctionBuilder scoreFunctionBuilder) { - if (scoreFunctionBuilder == null) { - throw new IllegalArgumentException("function_score: function must not be null"); + public FunctionScoreQueryBuilder(QueryBuilder query, FilterFunctionBuilder[] filterFunctionBuilders) { + if (query == null) { + throw new IllegalArgumentException("function_score: query must not be null"); } - this.filters.add(null); - this.scoreFunctions.add(scoreFunctionBuilder); - return this; + if (filterFunctionBuilders == null) { + throw new IllegalArgumentException("function_score: filters and functions array must not be null"); + } + for (FilterFunctionBuilder filterFunctionBuilder : filterFunctionBuilders) { + if (filterFunctionBuilder == null) { + throw new IllegalArgumentException("function_score: each filter and function must not be null"); + } + } + this.query = query; + this.filterFunctionBuilders = filterFunctionBuilders; + } + + /** + * Returns the query that defines which documents the function_score query will be executed on. + */ + public QueryBuilder query() { + return this.query; + } + + /** + * Returns the filters and functions + */ + public FilterFunctionBuilder[] filterFunctionBuilders() { + return this.filterFunctionBuilders; } /** * Score mode defines how results of individual score functions will be aggregated. 
- * Can be first, avg, max, sum, min, multiply + * @see org.elasticsearch.common.lucene.search.function.FiltersFunctionScoreQuery.ScoreMode */ - public FunctionScoreQueryBuilder scoreMode(String scoreMode) { + public FunctionScoreQueryBuilder scoreMode(FiltersFunctionScoreQuery.ScoreMode scoreMode) { + if (scoreMode == null) { + throw new IllegalArgumentException("[" + NAME + "] requires 'score_mode' field"); + } this.scoreMode = scoreMode; return this; } /** - * Score mode defines how the combined result of score functions will influence the final score together with the sub query score. - * Can be replace, avg, max, sum, min, multiply + * Returns the score mode, meaning how results of individual score functions will be aggregated. + * @see org.elasticsearch.common.lucene.search.function.FiltersFunctionScoreQuery.ScoreMode */ - public FunctionScoreQueryBuilder boostMode(String boostMode) { - this.boostMode = boostMode; - return this; + public FiltersFunctionScoreQuery.ScoreMode scoreMode() { + return this.scoreMode; } /** - * Score mode defines how the combined result of score functions will influence the final score together with the sub query score. + * Boost mode defines how the combined result of score functions will influence the final score together with the sub query score. + * @see CombineFunction */ public FunctionScoreQueryBuilder boostMode(CombineFunction combineFunction) { - this.boostMode = combineFunction.getName(); + if (combineFunction == null) { + throw new IllegalArgumentException("[" + NAME + "] requires 'boost_mode' field"); + } + this.boostMode = combineFunction; return this; } /** - * Tha maximum boost that will be applied by function score. + * Returns the boost mode, meaning how the combined result of score functions will influence the final score together with the sub query score. 
+ * @see CombineFunction + */ + public CombineFunction boostMode() { + return this.boostMode; + } + + /** + * Sets the maximum boost that will be applied by function score. */ public FunctionScoreQueryBuilder maxBoost(float maxBoost) { this.maxBoost = maxBoost; @@ -139,51 +183,34 @@ public class FunctionScoreQueryBuilder extends QueryBuilder implements Boostable } /** - * Sets the boost for this query. Documents matching this query will (in - * addition to the normal weightings) have their score multiplied by the - * boost provided. + * Returns the maximum boost that will be applied by function score. */ - @Override - public FunctionScoreQueryBuilder boost(float boost) { - this.boost = boost; - return this; + public float maxBoost() { + return this.maxBoost; } @Override protected void doXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(FunctionScoreQueryParser.NAME); - if (queryBuilder != null) { + builder.startObject(NAME); + if (query != null) { builder.field("query"); - queryBuilder.toXContent(builder, params); + query.toXContent(builder, params); } builder.startArray("functions"); - for (int i = 0; i < filters.size(); i++) { - builder.startObject(); - if (filters.get(i) != null) { - builder.field("filter"); - filters.get(i).toXContent(builder, params); - } - scoreFunctions.get(i).toXContent(builder, params); - builder.endObject(); + for (FilterFunctionBuilder filterFunctionBuilder : filterFunctionBuilders) { + filterFunctionBuilder.toXContent(builder, params); } builder.endArray(); - if (scoreMode != null) { - builder.field("score_mode", scoreMode); - } + builder.field("score_mode", scoreMode.name().toLowerCase(Locale.ROOT)); if (boostMode != null) { - builder.field("boost_mode", boostMode); - } - if (maxBoost != null) { - builder.field("max_boost", maxBoost); - } - if (boost != null) { - builder.field("boost", boost); + builder.field("boost_mode", boostMode.name().toLowerCase(Locale.ROOT)); } + 
builder.field("max_boost", maxBoost); if (minScore != null) { builder.field("min_score", minScore); } - + printBoostAndQueryName(builder); builder.endObject(); } @@ -191,4 +218,176 @@ public class FunctionScoreQueryBuilder extends QueryBuilder implements Boostable this.minScore = minScore; return this; } + + public Float getMinScore() { + return this.minScore; + } + + @Override + public String getWriteableName() { + return FunctionScoreQueryBuilder.NAME; + } + + @Override + protected boolean doEquals(FunctionScoreQueryBuilder other) { + return Objects.equals(this.query, other.query) && + Arrays.equals(this.filterFunctionBuilders, other.filterFunctionBuilders) && + Objects.equals(this.boostMode, other.boostMode) && + Objects.equals(this.scoreMode, other.scoreMode) && + Objects.equals(this.minScore, other.minScore) && + Objects.equals(this.maxBoost, other.maxBoost); + } + + @Override + protected int doHashCode() { + return Objects.hash(this.query, Arrays.hashCode(this.filterFunctionBuilders), this.boostMode, this.scoreMode, this.minScore, this.maxBoost); + } + + @Override + protected FunctionScoreQueryBuilder doReadFrom(StreamInput in) throws IOException { + QueryBuilder query = in.readQuery(); + int size = in.readVInt(); + FilterFunctionBuilder[] filterFunctionBuilders = new FilterFunctionBuilder[size]; + for (int i = 0; i < size; i++) { + filterFunctionBuilders[i] = FilterFunctionBuilder.PROTOTYPE.readFrom(in); + } + FunctionScoreQueryBuilder functionScoreQueryBuilder = new FunctionScoreQueryBuilder(query, filterFunctionBuilders); + functionScoreQueryBuilder.maxBoost(in.readFloat()); + if (in.readBoolean()) { + functionScoreQueryBuilder.setMinScore(in.readFloat()); + } + if (in.readBoolean()) { + functionScoreQueryBuilder.boostMode(CombineFunction.readCombineFunctionFrom(in)); + } + functionScoreQueryBuilder.scoreMode(FiltersFunctionScoreQuery.ScoreMode.readScoreModeFrom(in)); + return functionScoreQueryBuilder; + } + + @Override + protected void 
doWriteTo(StreamOutput out) throws IOException { + out.writeQuery(query); + out.writeVInt(filterFunctionBuilders.length); + for (FilterFunctionBuilder filterFunctionBuilder : filterFunctionBuilders) { + filterFunctionBuilder.writeTo(out); + } + out.writeFloat(maxBoost); + if (minScore == null) { + out.writeBoolean(false); + } else { + out.writeBoolean(true); + out.writeFloat(minScore); + } + if (boostMode == null) { + out.writeBoolean(false); + } else { + out.writeBoolean(true); + boostMode.writeTo(out); + } + scoreMode.writeTo(out); + } + + @Override + protected Query doToQuery(QueryShardContext context) throws IOException { + FiltersFunctionScoreQuery.FilterFunction[] filterFunctions = new FiltersFunctionScoreQuery.FilterFunction[filterFunctionBuilders.length]; + int i = 0; + for (FilterFunctionBuilder filterFunctionBuilder : filterFunctionBuilders) { + Query filter = filterFunctionBuilder.getFilter().toQuery(context); + ScoreFunction scoreFunction = filterFunctionBuilder.getScoreFunction().toFunction(context); + filterFunctions[i++] = new FiltersFunctionScoreQuery.FilterFunction(filter, scoreFunction); + } + + Query query = this.query.toQuery(context); + if (query == null) { + query = new MatchAllDocsQuery(); + } + + // handle cases where only one score function and no filter was provided. In this case we create a FunctionScoreQuery. + if (filterFunctions.length == 0 || filterFunctions.length == 1 && (this.filterFunctionBuilders[0].getFilter().getName().equals(MatchAllQueryBuilder.NAME))) { + ScoreFunction function = filterFunctions.length == 0 ? 
null : filterFunctions[0].function; + CombineFunction combineFunction = this.boostMode; + if (combineFunction == null) { + if (function != null) { + combineFunction = function.getDefaultScoreCombiner(); + } else { + combineFunction = DEFAULT_BOOST_MODE; + } + } + return new FunctionScoreQuery(query, function, minScore, combineFunction, maxBoost); + } + // in all other cases we create a FiltersFunctionScoreQuery + return new FiltersFunctionScoreQuery(query, scoreMode, filterFunctions, maxBoost, minScore, boostMode == null ? DEFAULT_BOOST_MODE : boostMode); + } + + /** + * Function to be associated with an optional filter, meaning it will be executed only for the documents + * that match the given filter. + */ + public static class FilterFunctionBuilder implements ToXContent, Writeable { + private static final FilterFunctionBuilder PROTOTYPE = new FilterFunctionBuilder(EmptyQueryBuilder.PROTOTYPE, new RandomScoreFunctionBuilder()); + + private final QueryBuilder filter; + private final ScoreFunctionBuilder scoreFunction; + + public FilterFunctionBuilder(ScoreFunctionBuilder scoreFunctionBuilder) { + this(new MatchAllQueryBuilder(), scoreFunctionBuilder); + } + + public FilterFunctionBuilder(QueryBuilder filter, ScoreFunctionBuilder scoreFunction) { + if (filter == null) { + throw new IllegalArgumentException("function_score: filter must not be null"); + } + if (scoreFunction == null) { + throw new IllegalArgumentException("function_score: function must not be null"); + } + this.filter = filter; + this.scoreFunction = scoreFunction; + } + + public QueryBuilder getFilter() { + return filter; + } + + public ScoreFunctionBuilder getScoreFunction() { + return scoreFunction; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field("filter"); + filter.toXContent(builder, params); + scoreFunction.toXContent(builder, params); + builder.endObject(); + return builder; + } + + 
@Override + public int hashCode() { + return Objects.hash(filter, scoreFunction); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + FilterFunctionBuilder that = (FilterFunctionBuilder) obj; + return Objects.equals(this.filter, that.filter) && + Objects.equals(this.scoreFunction, that.scoreFunction); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeQuery(filter); + out.writeScoreFunction(scoreFunction); + } + + @Override + public FilterFunctionBuilder readFrom(StreamInput in) throws IOException { + return new FilterFunctionBuilder(in.readQuery(), in.readScoreFunction()); + } + } } diff --git a/core/src/main/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryParser.java index d50b81f6b66..7adde617009 100644 --- a/core/src/main/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryParser.java @@ -19,32 +19,28 @@ package org.elasticsearch.index.query.functionscore; -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.ImmutableMap.Builder; -import org.apache.lucene.search.BooleanClause.Occur; -import org.apache.lucene.search.BooleanQuery; -import org.apache.lucene.search.ConstantScoreQuery; -import org.apache.lucene.search.Query; -import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.lucene.search.Queries; -import org.elasticsearch.common.lucene.search.function.*; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.query.QueryParseContext; -import 
org.elasticsearch.index.query.QueryParser; import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.xcontent.*; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.lucene.search.function.CombineFunction; +import org.elasticsearch.common.lucene.search.function.FiltersFunctionScoreQuery; +import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.query.*; +import org.elasticsearch.index.query.functionscore.weight.WeightBuilder; import java.io.IOException; import java.util.ArrayList; +import java.util.List; /** - * + * Parser for function_score query */ -public class FunctionScoreQueryParser implements QueryParser { +public class FunctionScoreQueryParser implements QueryParser { - public static final String NAME = "function_score"; + private static final FunctionScoreQueryBuilder PROTOTYPE = new FunctionScoreQueryBuilder(EmptyQueryBuilder.PROTOTYPE, new FunctionScoreQueryBuilder.FilterFunctionBuilder[0]); // For better readability of error message static final String MISPLACED_FUNCTION_MESSAGE_PREFIX = "you can either define [functions] array or a single function, not both. 
"; @@ -52,7 +48,7 @@ public class FunctionScoreQueryParser implements QueryParser { public static final ParseField WEIGHT_FIELD = new ParseField("weight"); private static final ParseField FILTER_FIELD = new ParseField("filter").withAllDeprecated("query"); - ScoreFunctionParserMapper functionParserMapper; + private final ScoreFunctionParserMapper functionParserMapper; @Inject public FunctionScoreQueryParser(ScoreFunctionParserMapper functionParserMapper) { @@ -61,141 +57,120 @@ public class FunctionScoreQueryParser implements QueryParser { @Override public String[] names() { - return new String[] { NAME, Strings.toCamelCase(NAME) }; - } - - private static final ImmutableMap combineFunctionsMap; - - static { - CombineFunction[] values = CombineFunction.values(); - Builder combineFunctionMapBuilder = ImmutableMap.builder(); - for (CombineFunction combineFunction : values) { - combineFunctionMapBuilder.put(combineFunction.getName(), combineFunction); - } - combineFunctionsMap = combineFunctionMapBuilder.build(); + return new String[] { FunctionScoreQueryBuilder.NAME, Strings.toCamelCase(FunctionScoreQueryBuilder.NAME) }; } @Override - public Query parse(QueryParseContext parseContext) throws IOException, ParsingException { + public FunctionScoreQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException { XContentParser parser = parseContext.parser(); - Query query = null; - Query filter = null; - float boost = 1.0f; + QueryBuilder query = null; + QueryBuilder filter = null; + float boost = AbstractQueryBuilder.DEFAULT_BOOST; + String queryName = null; - FiltersFunctionScoreQuery.ScoreMode scoreMode = FiltersFunctionScoreQuery.ScoreMode.Multiply; - ArrayList filterFunctions = new ArrayList<>(); - Float maxBoost = null; + FiltersFunctionScoreQuery.ScoreMode scoreMode = FunctionScoreQueryBuilder.DEFAULT_SCORE_MODE; + float maxBoost = FunctionScoreQuery.DEFAULT_MAX_BOOST; Float minScore = null; String currentFieldName = null; XContentParser.Token token; - 
CombineFunction combineFunction = CombineFunction.MULT; + CombineFunction combineFunction = null; // Either define array of functions and filters or only one function boolean functionArrayFound = false; boolean singleFunctionFound = false; String singleFunctionName = null; + List filterFunctionBuilders = new ArrayList<>(); while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); } else if ("query".equals(currentFieldName)) { - query = parseContext.parseInnerQuery(); + query = parseContext.parseInnerQueryBuilder(); } else if (parseContext.parseFieldMatcher().match(currentFieldName, FILTER_FIELD)) { - filter = parseContext.parseInnerFilter(); + filter = parseContext.parseInnerQueryBuilder(); } else if ("score_mode".equals(currentFieldName) || "scoreMode".equals(currentFieldName)) { - scoreMode = parseScoreMode(parseContext, parser); + scoreMode = FiltersFunctionScoreQuery.ScoreMode.fromString(parser.text()); } else if ("boost_mode".equals(currentFieldName) || "boostMode".equals(currentFieldName)) { - combineFunction = parseBoostMode(parseContext, parser); + combineFunction = CombineFunction.fromString(parser.text()); } else if ("max_boost".equals(currentFieldName) || "maxBoost".equals(currentFieldName)) { maxBoost = parser.floatValue(); } else if ("boost".equals(currentFieldName)) { boost = parser.floatValue(); + } else if ("_name".equals(currentFieldName)) { + queryName = parser.text(); } else if ("min_score".equals(currentFieldName) || "minScore".equals(currentFieldName)) { minScore = parser.floatValue(); } else if ("functions".equals(currentFieldName)) { if (singleFunctionFound) { String errorString = "already found [" + singleFunctionName + "], now encountering [functions]."; - handleMisplacedFunctionsDeclaration(errorString); + handleMisplacedFunctionsDeclaration(parser.getTokenLocation(), errorString); } - currentFieldName = 
parseFiltersAndFunctions(parseContext, parser, filterFunctions, currentFieldName); functionArrayFound = true; + currentFieldName = parseFiltersAndFunctions(parseContext, parser, filterFunctionBuilders); } else { - ScoreFunction scoreFunction; - if (currentFieldName.equals("weight")) { - scoreFunction = new WeightFactorFunction(parser.floatValue()); + if (singleFunctionFound) { + throw new ParsingException(parser.getTokenLocation(), "failed to parse [{}] query. already found function [{}], now encountering [{}]. use [functions] array if you want to define several functions.", FunctionScoreQueryBuilder.NAME, singleFunctionName, currentFieldName); + } + if (functionArrayFound) { + String errorString = "already found [functions] array, now encountering [" + currentFieldName + "]."; + handleMisplacedFunctionsDeclaration(parser.getTokenLocation(), errorString); + } + singleFunctionFound = true; + singleFunctionName = currentFieldName; + ScoreFunctionBuilder scoreFunction; + if (parseContext.parseFieldMatcher().match(currentFieldName, WEIGHT_FIELD)) { + scoreFunction = new WeightBuilder().setWeight(parser.floatValue()); } else { // we try to parse a score function. If there is no score // function for the current field name, // functionParserMapper.get() will throw an Exception. - scoreFunction = functionParserMapper.get(parseContext, currentFieldName).parse(parseContext, parser); + scoreFunction = functionParserMapper.get(parser.getTokenLocation(), currentFieldName).fromXContent(parseContext, parser); } - if (functionArrayFound) { - String errorString = "already found [functions] array, now encountering [" + currentFieldName + "]."; - handleMisplacedFunctionsDeclaration(errorString); - } - if (filterFunctions.size() > 0) { - throw new ElasticsearchParseException("failed to parse [{}] query. already found function [{}], now encountering [{}]. 
use [functions] array if you want to define several functions.", NAME, singleFunctionName, currentFieldName); - } - filterFunctions.add(new FiltersFunctionScoreQuery.FilterFunction(null, scoreFunction)); - singleFunctionFound = true; - singleFunctionName = currentFieldName; + filterFunctionBuilders.add(new FunctionScoreQueryBuilder.FilterFunctionBuilder(scoreFunction)); } } + if (query == null && filter == null) { - query = Queries.newMatchAllQuery(); + query = new MatchAllQueryBuilder(); } else if (query == null && filter != null) { - query = new ConstantScoreQuery(filter); + query = new ConstantScoreQueryBuilder(filter); } else if (query != null && filter != null) { - final BooleanQuery.Builder filtered = new BooleanQuery.Builder(); - filtered.add(query, Occur.MUST); - filtered.add(filter, Occur.FILTER); - query = filtered.build(); + final BoolQueryBuilder boolQueryBuilder = new BoolQueryBuilder(); + boolQueryBuilder.must(query); + boolQueryBuilder.filter(filter); + query = boolQueryBuilder; } - // if all filter elements returned null, just use the query - if (filterFunctions.isEmpty() && combineFunction == null) { - return query; + + FunctionScoreQueryBuilder functionScoreQueryBuilder = new FunctionScoreQueryBuilder(query, + filterFunctionBuilders.toArray(new FunctionScoreQueryBuilder.FilterFunctionBuilder[filterFunctionBuilders.size()])); + if (combineFunction != null) { + functionScoreQueryBuilder.boostMode(combineFunction); } - if (maxBoost == null) { - maxBoost = Float.MAX_VALUE; - } - // handle cases where only one score function and no filter was - // provided. In this case we create a FunctionScoreQuery. - if (filterFunctions.size() == 0 || filterFunctions.size() == 1 && (filterFunctions.get(0).filter == null || Queries.isConstantMatchAllQuery(filterFunctions.get(0).filter))) { - ScoreFunction function = filterFunctions.size() == 0 ? 
null : filterFunctions.get(0).function; - FunctionScoreQuery theQuery = new FunctionScoreQuery(query, function, minScore); - if (combineFunction != null) { - theQuery.setCombineFunction(combineFunction); - } - theQuery.setBoost(boost); - theQuery.setMaxBoost(maxBoost); - return theQuery; - // in all other cases we create a FiltersFunctionScoreQuery. - } else { - FiltersFunctionScoreQuery functionScoreQuery = new FiltersFunctionScoreQuery(query, scoreMode, - filterFunctions.toArray(new FiltersFunctionScoreQuery.FilterFunction[filterFunctions.size()]), maxBoost, minScore); - if (combineFunction != null) { - functionScoreQuery.setCombineFunction(combineFunction); - } - functionScoreQuery.setBoost(boost); - return functionScoreQuery; + functionScoreQueryBuilder.scoreMode(scoreMode); + functionScoreQueryBuilder.maxBoost(maxBoost); + if (minScore != null) { + functionScoreQueryBuilder.setMinScore(minScore); } + functionScoreQueryBuilder.boost(boost); + functionScoreQueryBuilder.queryName(queryName); + return functionScoreQueryBuilder; } - private void handleMisplacedFunctionsDeclaration(String errorString) { - throw new ElasticsearchParseException("failed to parse [{}] query. [{}]", NAME, MISPLACED_FUNCTION_MESSAGE_PREFIX + errorString); + private static void handleMisplacedFunctionsDeclaration(XContentLocation contentLocation, String errorString) { + throw new ParsingException(contentLocation, "failed to parse [{}] query. 
[{}]", FunctionScoreQueryBuilder.NAME, MISPLACED_FUNCTION_MESSAGE_PREFIX + errorString); } - private String parseFiltersAndFunctions(QueryParseContext parseContext, XContentParser parser, - ArrayList filterFunctions, String currentFieldName) throws IOException { + private String parseFiltersAndFunctions(QueryParseContext parseContext, XContentParser parser, List filterFunctionBuilders) throws IOException { + String currentFieldName = null; XContentParser.Token token; while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { - Query filter = null; - ScoreFunction scoreFunction = null; + QueryBuilder filter = null; + ScoreFunctionBuilder scoreFunction = null; Float functionWeight = null; if (token != XContentParser.Token.START_OBJECT) { - throw new ParsingException(parseContext, "failed to parse [{}]. malformed query, expected a [{}] while parsing functions but got a [{}] instead", XContentParser.Token.START_OBJECT, token, NAME); + throw new ParsingException(parser.getTokenLocation(), "failed to parse [{}]. malformed query, expected a [{}] while parsing functions but got a [{}] instead", XContentParser.Token.START_OBJECT, token, FunctionScoreQueryBuilder.NAME); } else { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { @@ -204,57 +179,40 @@ public class FunctionScoreQueryParser implements QueryParser { functionWeight = parser.floatValue(); } else { if ("filter".equals(currentFieldName)) { - filter = parseContext.parseInnerFilter(); + filter = parseContext.parseInnerQueryBuilder(); } else { + if (scoreFunction != null) { + throw new ParsingException(parser.getTokenLocation(), "failed to parse function_score functions. 
already found [{}], now encountering [{}].", scoreFunction.getName(), currentFieldName); + } // do not need to check null here, // functionParserMapper throws exception if parser // non-existent - ScoreFunctionParser functionParser = functionParserMapper.get(parseContext, currentFieldName); - scoreFunction = functionParser.parse(parseContext, parser); + ScoreFunctionParser functionParser = functionParserMapper.get(parser.getTokenLocation(), currentFieldName); + scoreFunction = functionParser.fromXContent(parseContext, parser); } } } if (functionWeight != null) { - scoreFunction = new WeightFactorFunction(functionWeight, scoreFunction); + if (scoreFunction == null) { + scoreFunction = new WeightBuilder().setWeight(functionWeight); + } else { + scoreFunction.setWeight(functionWeight); + } } } if (filter == null) { - filter = Queries.newMatchAllQuery(); + filter = new MatchAllQueryBuilder(); } if (scoreFunction == null) { - throw new ElasticsearchParseException("failed to parse [{}] query. an entry in functions list is missing a function.", NAME); + throw new ParsingException(parser.getTokenLocation(), "failed to parse [{}] query. 
an entry in functions list is missing a function.", FunctionScoreQueryBuilder.NAME); } - filterFunctions.add(new FiltersFunctionScoreQuery.FilterFunction(filter, scoreFunction)); - + filterFunctionBuilders.add(new FunctionScoreQueryBuilder.FilterFunctionBuilder(filter, scoreFunction)); } return currentFieldName; } - private FiltersFunctionScoreQuery.ScoreMode parseScoreMode(QueryParseContext parseContext, XContentParser parser) throws IOException { - String scoreMode = parser.text(); - if ("avg".equals(scoreMode)) { - return FiltersFunctionScoreQuery.ScoreMode.Avg; - } else if ("max".equals(scoreMode)) { - return FiltersFunctionScoreQuery.ScoreMode.Max; - } else if ("min".equals(scoreMode)) { - return FiltersFunctionScoreQuery.ScoreMode.Min; - } else if ("sum".equals(scoreMode)) { - return FiltersFunctionScoreQuery.ScoreMode.Sum; - } else if ("multiply".equals(scoreMode)) { - return FiltersFunctionScoreQuery.ScoreMode.Multiply; - } else if ("first".equals(scoreMode)) { - return FiltersFunctionScoreQuery.ScoreMode.First; - } else { - throw new ParsingException(parseContext, "failed to parse [{}] query. illegal score_mode [{}]", NAME, scoreMode); - } + @Override + public FunctionScoreQueryBuilder getBuilderPrototype() { + return PROTOTYPE; } - - private CombineFunction parseBoostMode(QueryParseContext parseContext, XContentParser parser) throws IOException { - String boostMode = parser.text(); - CombineFunction cf = combineFunctionsMap.get(boostMode); - if (cf == null) { - throw new ParsingException(parseContext, "failed to parse [{}] query. 
illegal boost_mode [{}]", NAME, boostMode); - } - return cf; - } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/index/query/functionscore/ScoreFunctionBuilder.java b/core/src/main/java/org/elasticsearch/index/query/functionscore/ScoreFunctionBuilder.java index e4fc5cdac8c..c2346cc6a31 100644 --- a/core/src/main/java/org/elasticsearch/index/query/functionscore/ScoreFunctionBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/functionscore/ScoreFunctionBuilder.java @@ -19,21 +19,32 @@ package org.elasticsearch.index.query.functionscore; +import org.elasticsearch.common.io.stream.NamedWriteable; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.lucene.search.function.ScoreFunction; +import org.elasticsearch.common.lucene.search.function.WeightFactorFunction; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.query.QueryShardContext; import java.io.IOException; +import java.util.Objects; -public abstract class ScoreFunctionBuilder implements ToXContent { +public abstract class ScoreFunctionBuilder implements ToXContent, NamedWriteable { + + protected Float weight; + + public abstract String getName(); public ScoreFunctionBuilder setWeight(float weight) { this.weight = weight; return this; } - private Float weight; - - public abstract String getName(); + public Float getWeight() { + return weight; + } protected void buildWeight(XContentBuilder builder) throws IOException { if (weight != null) { @@ -49,4 +60,69 @@ public abstract class ScoreFunctionBuilder implements ToXContent { } protected abstract void doXContent(XContentBuilder builder, Params params) throws IOException; + + @Override + public String getWriteableName() { + return getName(); + } + + @Override + public final void writeTo(StreamOutput out) throws IOException { + 
doWriteTo(out); + if (weight == null) { + out.writeBoolean(false); + } else { + out.writeBoolean(true); + out.writeFloat(weight); + } + } + + protected abstract void doWriteTo(StreamOutput out) throws IOException; + + @Override + public final FB readFrom(StreamInput in) throws IOException { + FB scoreFunctionBuilder = doReadFrom(in); + if (in.readBoolean()) { + scoreFunctionBuilder.setWeight(in.readFloat()); + } + return scoreFunctionBuilder; + } + + protected abstract FB doReadFrom(StreamInput in) throws IOException; + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + @SuppressWarnings("unchecked") + FB other = (FB) obj; + return Objects.equals(weight, other.weight) && + doEquals(other); + } + + protected abstract boolean doEquals(FB functionBuilder); + + @Override + public final int hashCode() { + return Objects.hash(getClass(), weight, doHashCode()); + } + + protected abstract int doHashCode(); + + /** + * Called on a data node, converts a {@link NamedWriteable} score function into its corresponding lucene function object. 
+ */ + public final ScoreFunction toFunction(QueryShardContext context) throws IOException { + ScoreFunction scoreFunction = doToFunction(context); + if (weight == null) { + return scoreFunction; + } + return new WeightFactorFunction(weight, scoreFunction); + } + + protected abstract ScoreFunction doToFunction(QueryShardContext context) throws IOException; } diff --git a/core/src/main/java/org/elasticsearch/index/query/functionscore/ScoreFunctionBuilders.java b/core/src/main/java/org/elasticsearch/index/query/functionscore/ScoreFunctionBuilders.java index 23c1ca17f0f..3c8416a37c5 100644 --- a/core/src/main/java/org/elasticsearch/index/query/functionscore/ScoreFunctionBuilders.java +++ b/core/src/main/java/org/elasticsearch/index/query/functionscore/ScoreFunctionBuilders.java @@ -29,29 +29,41 @@ import org.elasticsearch.index.query.functionscore.weight.WeightBuilder; import org.elasticsearch.script.Script; public class ScoreFunctionBuilders { - + public static ExponentialDecayFunctionBuilder exponentialDecayFunction(String fieldName, Object origin, Object scale) { - return new ExponentialDecayFunctionBuilder(fieldName, origin, scale); + return new ExponentialDecayFunctionBuilder(fieldName, origin, scale, null); } - - public static ExponentialDecayFunctionBuilder exponentialDecayFunction(String fieldName, Object scale) { - return new ExponentialDecayFunctionBuilder(fieldName, null, scale); + + public static ExponentialDecayFunctionBuilder exponentialDecayFunction(String fieldName, Object origin, Object scale, Object offset) { + return new ExponentialDecayFunctionBuilder(fieldName, origin, scale, offset); } - + + public static ExponentialDecayFunctionBuilder exponentialDecayFunction(String fieldName, Object origin, Object scale, Object offset, double decay) { + return new ExponentialDecayFunctionBuilder(fieldName, origin, scale, offset, decay); + } + public static GaussDecayFunctionBuilder gaussDecayFunction(String fieldName, Object origin, Object scale) { - return 
new GaussDecayFunctionBuilder(fieldName, origin, scale); + return new GaussDecayFunctionBuilder(fieldName, origin, scale, null); } - - public static GaussDecayFunctionBuilder gaussDecayFunction(String fieldName, Object scale) { - return new GaussDecayFunctionBuilder(fieldName, null, scale); + + public static GaussDecayFunctionBuilder gaussDecayFunction(String fieldName, Object origin, Object scale, Object offset) { + return new GaussDecayFunctionBuilder(fieldName, origin, scale, offset); } - + + public static GaussDecayFunctionBuilder gaussDecayFunction(String fieldName, Object origin, Object scale, Object offset, double decay) { + return new GaussDecayFunctionBuilder(fieldName, origin, scale, offset, decay); + } + public static LinearDecayFunctionBuilder linearDecayFunction(String fieldName, Object origin, Object scale) { - return new LinearDecayFunctionBuilder(fieldName, origin, scale); + return new LinearDecayFunctionBuilder(fieldName, origin, scale, null); } - - public static LinearDecayFunctionBuilder linearDecayFunction(String fieldName, Object scale) { - return new LinearDecayFunctionBuilder(fieldName, null, scale); + + public static LinearDecayFunctionBuilder linearDecayFunction(String fieldName, Object origin, Object scale, Object offset) { + return new LinearDecayFunctionBuilder(fieldName, origin, scale, offset); + } + + public static LinearDecayFunctionBuilder linearDecayFunction(String fieldName, Object origin, Object scale, Object offset, double decay) { + return new LinearDecayFunctionBuilder(fieldName, origin, scale, offset, decay); } public static ScriptScoreFunctionBuilder scriptFunction(Script script) { diff --git a/core/src/main/java/org/elasticsearch/index/query/functionscore/ScoreFunctionParser.java b/core/src/main/java/org/elasticsearch/index/query/functionscore/ScoreFunctionParser.java index 546dadaeaca..df76f14e8d8 100644 --- a/core/src/main/java/org/elasticsearch/index/query/functionscore/ScoreFunctionParser.java +++ 
b/core/src/main/java/org/elasticsearch/index/query/functionscore/ScoreFunctionParser.java @@ -19,16 +19,17 @@ package org.elasticsearch.index.query.functionscore; -import org.elasticsearch.common.lucene.search.function.ScoreFunction; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.common.ParsingException; +import org.elasticsearch.index.query.QueryParseContext; import java.io.IOException; -public interface ScoreFunctionParser { +public interface ScoreFunctionParser> { - ScoreFunction parse(QueryParseContext parseContext, XContentParser parser) throws IOException, ParsingException; + FB fromXContent(QueryParseContext context, XContentParser parser) throws IOException, ParsingException; + + FB getBuilderPrototype(); /** * Returns the name of the function, for example "linear", "gauss" etc. This @@ -36,5 +37,4 @@ public interface ScoreFunctionParser { * {@link FunctionScoreQueryParser}. * */ String[] getNames(); - } diff --git a/core/src/main/java/org/elasticsearch/index/query/functionscore/ScoreFunctionParserMapper.java b/core/src/main/java/org/elasticsearch/index/query/functionscore/ScoreFunctionParserMapper.java index 63854e2af59..c528c0007f2 100644 --- a/core/src/main/java/org/elasticsearch/index/query/functionscore/ScoreFunctionParserMapper.java +++ b/core/src/main/java/org/elasticsearch/index/query/functionscore/ScoreFunctionParserMapper.java @@ -19,15 +19,20 @@ package org.elasticsearch.index.query.functionscore; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.index.query.QueryParseContext; +import java.util.Map; + import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.xcontent.XContentLocation; +import org.elasticsearch.index.query.QueryParseContext; import 
org.elasticsearch.index.query.functionscore.exp.ExponentialDecayFunctionParser; import org.elasticsearch.index.query.functionscore.fieldvaluefactor.FieldValueFactorFunctionParser; import org.elasticsearch.index.query.functionscore.gauss.GaussDecayFunctionParser; import org.elasticsearch.index.query.functionscore.lin.LinearDecayFunctionParser; import org.elasticsearch.index.query.functionscore.random.RandomScoreFunctionParser; import org.elasticsearch.index.query.functionscore.script.ScriptScoreFunctionParser; +import org.elasticsearch.index.query.functionscore.weight.WeightBuilder; import java.util.Collections; import java.util.HashMap; @@ -36,28 +41,31 @@ import java.util.Set; public class ScoreFunctionParserMapper { - protected Map functionParsers; + protected Map> functionParsers; @Inject - public ScoreFunctionParserMapper(Set parsers) { - Map map = new HashMap<>(); + public ScoreFunctionParserMapper(Set parsers, NamedWriteableRegistry namedWriteableRegistry) { + Map> map = new HashMap<>(); // built-in parsers - addParser(new ScriptScoreFunctionParser(), map); - addParser(new GaussDecayFunctionParser(), map); - addParser(new LinearDecayFunctionParser(), map); - addParser(new ExponentialDecayFunctionParser(), map); - addParser(new RandomScoreFunctionParser(), map); - addParser(new FieldValueFactorFunctionParser(), map); - for (ScoreFunctionParser scoreFunctionParser : parsers) { - addParser(scoreFunctionParser, map); + addParser(new ScriptScoreFunctionParser(), map, namedWriteableRegistry); + addParser(new GaussDecayFunctionParser(), map, namedWriteableRegistry); + addParser(new LinearDecayFunctionParser(), map, namedWriteableRegistry); + addParser(new ExponentialDecayFunctionParser(), map, namedWriteableRegistry); + addParser(new RandomScoreFunctionParser(), map, namedWriteableRegistry); + addParser(new FieldValueFactorFunctionParser(), map, namedWriteableRegistry); + for (ScoreFunctionParser scoreFunctionParser : parsers) { + addParser(scoreFunctionParser, map, 
namedWriteableRegistry); } this.functionParsers = Collections.unmodifiableMap(map); + //weight doesn't have its own parser, so every function supports it out of the box. + //Can be a single function too when not associated to any other function, which is why it needs to be registered manually here. + namedWriteableRegistry.registerPrototype(ScoreFunctionBuilder.class, new WeightBuilder()); } - public ScoreFunctionParser get(QueryParseContext parseContext, String parserName) { + public ScoreFunctionParser get(XContentLocation contentLocation, String parserName) { ScoreFunctionParser functionParser = get(parserName); if (functionParser == null) { - throw new ParsingException(parseContext, "No function with the name [" + parserName + "] is registered."); + throw new ParsingException(contentLocation, "No function with the name [" + parserName + "] is registered."); } return functionParser; } @@ -66,10 +74,11 @@ public class ScoreFunctionParserMapper { return functionParsers.get(parserName); } - private void addParser(ScoreFunctionParser scoreFunctionParser, Map map) { + private static void addParser(ScoreFunctionParser scoreFunctionParser, Map> map, NamedWriteableRegistry namedWriteableRegistry) { for (String name : scoreFunctionParser.getNames()) { map.put(name, scoreFunctionParser); - } - } + } + namedWriteableRegistry.registerPrototype(ScoreFunctionBuilder.class, scoreFunctionParser.getBuilderPrototype()); + } } diff --git a/core/src/main/java/org/elasticsearch/index/query/functionscore/exp/ExponentialDecayFunctionBuilder.java b/core/src/main/java/org/elasticsearch/index/query/functionscore/exp/ExponentialDecayFunctionBuilder.java index f4a730b8cf4..3c813930b47 100644 --- a/core/src/main/java/org/elasticsearch/index/query/functionscore/exp/ExponentialDecayFunctionBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/functionscore/exp/ExponentialDecayFunctionBuilder.java @@ -20,12 +20,30 @@ package org.elasticsearch.index.query.functionscore.exp; 
+import org.apache.lucene.search.Explanation; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.index.query.functionscore.DecayFunction; import org.elasticsearch.index.query.functionscore.DecayFunctionBuilder; -public class ExponentialDecayFunctionBuilder extends DecayFunctionBuilder { +public class ExponentialDecayFunctionBuilder extends DecayFunctionBuilder { - public ExponentialDecayFunctionBuilder(String fieldName, Object origin, Object scale) { - super(fieldName, origin, scale); + private static final DecayFunction EXP_DECAY_FUNCTION = new ExponentialDecayScoreFunction(); + + public ExponentialDecayFunctionBuilder(String fieldName, Object origin, Object scale, Object offset) { + super(fieldName, origin, scale, offset); + } + + public ExponentialDecayFunctionBuilder(String fieldName, Object origin, Object scale, Object offset, double decay) { + super(fieldName, origin, scale, offset, decay); + } + + private ExponentialDecayFunctionBuilder(String fieldName, BytesReference functionBytes) { + super(fieldName, functionBytes); + } + + @Override + protected ExponentialDecayFunctionBuilder createFunctionBuilder(String fieldName, BytesReference functionBytes) { + return new ExponentialDecayFunctionBuilder(fieldName, functionBytes); } @Override @@ -33,4 +51,41 @@ public class ExponentialDecayFunctionBuilder extends DecayFunctionBuilder { return ExponentialDecayFunctionParser.NAMES[0]; } + @Override + public DecayFunction getDecayFunction() { + return EXP_DECAY_FUNCTION; + } + + private static final class ExponentialDecayScoreFunction implements DecayFunction { + + @Override + public double evaluate(double value, double scale) { + return Math.exp(scale * value); + } + + @Override + public Explanation explainFunction(String valueExpl, double value, double scale) { + return Explanation.match( + (float) evaluate(value, scale), + "exp(- " + valueExpl + " * " + -1 * scale + ")"); + } + + @Override + public double processScale(double scale, 
double decay) { + return Math.log(decay) / scale; + } + + @Override + public int hashCode() { + return this.getClass().hashCode(); + } + + @Override + public boolean equals(Object obj) { + if (super.equals(obj)) { + return true; + } + return obj != null && getClass() != obj.getClass(); + } + } } diff --git a/core/src/main/java/org/elasticsearch/index/query/functionscore/exp/ExponentialDecayFunctionParser.java b/core/src/main/java/org/elasticsearch/index/query/functionscore/exp/ExponentialDecayFunctionParser.java index bab04d4a1dc..ab2661ed9b9 100644 --- a/core/src/main/java/org/elasticsearch/index/query/functionscore/exp/ExponentialDecayFunctionParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/functionscore/exp/ExponentialDecayFunctionParser.java @@ -19,11 +19,11 @@ package org.elasticsearch.index.query.functionscore.exp; -import org.apache.lucene.search.Explanation; -import org.elasticsearch.index.query.functionscore.DecayFunction; import org.elasticsearch.index.query.functionscore.DecayFunctionParser; -public class ExponentialDecayFunctionParser extends DecayFunctionParser { +public class ExponentialDecayFunctionParser extends DecayFunctionParser { + + private static final ExponentialDecayFunctionBuilder PROTOTYPE = new ExponentialDecayFunctionBuilder("", "", "", ""); public static final String[] NAMES = { "exp" }; @@ -32,31 +32,8 @@ public class ExponentialDecayFunctionParser extends DecayFunctionParser { return NAMES; } - static final DecayFunction decayFunction = new ExponentialDecayScoreFunction(); - @Override - public DecayFunction getDecayFunction() { - return decayFunction; - } - - final static class ExponentialDecayScoreFunction implements DecayFunction { - - @Override - public double evaluate(double value, double scale) { - return Math.exp(scale * value); - } - - @Override - public Explanation explainFunction(String valueExpl, double value, double scale) { - return Explanation.match( - (float) evaluate(value, scale), - "exp(- " + 
valueExpl + " * " + -1 * scale + ")"); - } - - @Override - public double processScale(double scale, double decay) { - return Math.log(decay) / scale; - } - + public ExponentialDecayFunctionBuilder getBuilderPrototype() { + return PROTOTYPE; } } diff --git a/core/src/main/java/org/elasticsearch/index/query/functionscore/fieldvaluefactor/FieldValueFactorFunctionBuilder.java b/core/src/main/java/org/elasticsearch/index/query/functionscore/fieldvaluefactor/FieldValueFactorFunctionBuilder.java index 5d38c5a5eb5..f34484a3688 100644 --- a/core/src/main/java/org/elasticsearch/index/query/functionscore/fieldvaluefactor/FieldValueFactorFunctionBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/functionscore/fieldvaluefactor/FieldValueFactorFunctionBuilder.java @@ -19,24 +19,39 @@ package org.elasticsearch.index.query.functionscore.fieldvaluefactor; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.lucene.search.function.FieldValueFactorFunction; +import org.elasticsearch.common.lucene.search.function.ScoreFunction; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.fielddata.IndexNumericFieldData; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder; import java.io.IOException; import java.util.Locale; +import java.util.Objects; /** * Builder to construct {@code field_value_factor} functions for a function * score query. 
*/ -public class FieldValueFactorFunctionBuilder extends ScoreFunctionBuilder { - private String field = null; - private Float factor = null; - private Double missing = null; - private FieldValueFactorFunction.Modifier modifier = null; +public class FieldValueFactorFunctionBuilder extends ScoreFunctionBuilder { + + public static final FieldValueFactorFunction.Modifier DEFAULT_MODIFIER = FieldValueFactorFunction.Modifier.NONE; + public static final float DEFAULT_FACTOR = 1; + + private final String field; + private float factor = DEFAULT_FACTOR; + private Double missing; + private FieldValueFactorFunction.Modifier modifier = DEFAULT_MODIFIER; public FieldValueFactorFunctionBuilder(String fieldName) { + if (fieldName == null) { + throw new IllegalArgumentException("field_value_factor: field must not be null"); + } this.field = fieldName; } @@ -45,11 +60,19 @@ public class FieldValueFactorFunctionBuilder extends ScoreFunctionBuilder { return FieldValueFactorFunctionParser.NAMES[0]; } + public String fieldName() { + return this.field; + } + public FieldValueFactorFunctionBuilder factor(float boostFactor) { this.factor = boostFactor; return this; } + public float factor() { + return this.factor; + } + /** * Value used instead of the field value for documents that don't have that field defined. 
*/ @@ -58,29 +81,82 @@ public class FieldValueFactorFunctionBuilder extends ScoreFunctionBuilder { return this; } + public Double missing() { + return this.missing; + } + public FieldValueFactorFunctionBuilder modifier(FieldValueFactorFunction.Modifier modifier) { + if (modifier == null) { + throw new IllegalArgumentException("field_value_factor: modifier must not be null"); + } this.modifier = modifier; return this; } + public FieldValueFactorFunction.Modifier modifier() { + return this.modifier; + } + @Override public void doXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(getName()); - if (field != null) { - builder.field("field", field); - } - - if (factor != null) { - builder.field("factor", factor); - } - + builder.field("field", field); + builder.field("factor", factor); if (missing != null) { builder.field("missing", missing); } - - if (modifier != null) { - builder.field("modifier", modifier.toString().toLowerCase(Locale.ROOT)); - } + builder.field("modifier", modifier.name().toLowerCase(Locale.ROOT)); builder.endObject(); } + + @Override + protected void doWriteTo(StreamOutput out) throws IOException { + out.writeString(field); + out.writeFloat(factor); + if (missing == null) { + out.writeBoolean(false); + } else { + out.writeBoolean(true); + out.writeDouble(missing); + } + modifier.writeTo(out); + } + + @Override + protected FieldValueFactorFunctionBuilder doReadFrom(StreamInput in) throws IOException { + FieldValueFactorFunctionBuilder functionBuilder = new FieldValueFactorFunctionBuilder(in.readString()); + functionBuilder.factor = in.readFloat(); + if (in.readBoolean()) { + functionBuilder.missing = in.readDouble(); + } + functionBuilder.modifier = FieldValueFactorFunction.Modifier.readModifierFrom(in); + return functionBuilder; + } + + @Override + protected boolean doEquals(FieldValueFactorFunctionBuilder functionBuilder) { + return Objects.equals(this.field, functionBuilder.field) && + 
Objects.equals(this.factor, functionBuilder.factor) && + Objects.equals(this.missing, functionBuilder.missing) && + Objects.equals(this.modifier, functionBuilder.modifier); + } + + @Override + protected int doHashCode() { + return Objects.hash(this.field, this.factor, this.missing, this.modifier); + } + + @Override + protected ScoreFunction doToFunction(QueryShardContext context) { + MappedFieldType fieldType = context.mapperService().smartNameFieldType(field); + IndexNumericFieldData fieldData = null; + if (fieldType == null) { + if(missing == null) { + throw new ElasticsearchException("Unable to find a field mapper for field [" + field + "]. No 'missing' value defined."); + } + } else { + fieldData = context.getForField(fieldType); + } + return new FieldValueFactorFunction(field, factor, modifier, missing, fieldData); + } } diff --git a/core/src/main/java/org/elasticsearch/index/query/functionscore/fieldvaluefactor/FieldValueFactorFunctionParser.java b/core/src/main/java/org/elasticsearch/index/query/functionscore/fieldvaluefactor/FieldValueFactorFunctionParser.java index 4e81e11616d..06d6ba8f560 100644 --- a/core/src/main/java/org/elasticsearch/index/query/functionscore/fieldvaluefactor/FieldValueFactorFunctionParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/functionscore/fieldvaluefactor/FieldValueFactorFunctionParser.java @@ -19,19 +19,13 @@ package org.elasticsearch.index.query.functionscore.fieldvaluefactor; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.lucene.search.function.FieldValueFactorFunction; -import org.elasticsearch.common.lucene.search.function.ScoreFunction; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.fielddata.IndexNumericFieldData; -import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.common.ParsingException; import 
org.elasticsearch.index.query.functionscore.ScoreFunctionParser; -import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; -import java.util.Locale; /** * Parses out a function_score function that looks like: @@ -47,15 +41,16 @@ import java.util.Locale; * } * */ -public class FieldValueFactorFunctionParser implements ScoreFunctionParser { +public class FieldValueFactorFunctionParser implements ScoreFunctionParser { public static String[] NAMES = { "field_value_factor", "fieldValueFactor" }; - @Override - public ScoreFunction parse(QueryParseContext parseContext, XContentParser parser) throws IOException, ParsingException { + private static final FieldValueFactorFunctionBuilder PROTOTYPE = new FieldValueFactorFunctionBuilder(""); + @Override + public FieldValueFactorFunctionBuilder fromXContent(QueryParseContext parseContext, XContentParser parser) throws IOException, ParsingException { String currentFieldName = null; String field = null; - float boostFactor = 1; + float boostFactor = FieldValueFactorFunctionBuilder.DEFAULT_FACTOR; FieldValueFactorFunction.Modifier modifier = FieldValueFactorFunction.Modifier.NONE; Double missing = null; XContentParser.Token token; @@ -68,36 +63,35 @@ public class FieldValueFactorFunctionParser implements ScoreFunctionParser { } else if ("factor".equals(currentFieldName)) { boostFactor = parser.floatValue(); } else if ("modifier".equals(currentFieldName)) { - modifier = FieldValueFactorFunction.Modifier.valueOf(parser.text().toUpperCase(Locale.ROOT)); + modifier = FieldValueFactorFunction.Modifier.fromString(parser.text()); } else if ("missing".equals(currentFieldName)) { missing = parser.doubleValue(); } else { - throw new ParsingException(parseContext, NAMES[0] + " query does not support [" + currentFieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), NAMES[0] + " query does not support [" + currentFieldName + "]"); } } else if("factor".equals(currentFieldName) && (token == 
XContentParser.Token.START_ARRAY || token == XContentParser.Token.START_OBJECT)) { - throw new ParsingException(parseContext, "[" + NAMES[0] + "] field 'factor' does not support lists or objects"); + throw new ParsingException(parser.getTokenLocation(), "[" + NAMES[0] + "] field 'factor' does not support lists or objects"); } } if (field == null) { - throw new ParsingException(parseContext, "[" + NAMES[0] + "] required field 'field' missing"); + throw new ParsingException(parser.getTokenLocation(), "[" + NAMES[0] + "] required field 'field' missing"); } - SearchContext searchContext = SearchContext.current(); - MappedFieldType fieldType = searchContext.mapperService().smartNameFieldType(field); - IndexNumericFieldData fieldData = null; - if (fieldType == null) { - if(missing == null) { - throw new ElasticsearchException("Unable to find a field mapper for field [" + field + "]. No 'missing' value defined."); - } - } else { - fieldData = searchContext.fieldData().getForField(fieldType); + FieldValueFactorFunctionBuilder fieldValueFactorFunctionBuilder = new FieldValueFactorFunctionBuilder(field).factor(boostFactor).modifier(modifier); + if (missing != null) { + fieldValueFactorFunctionBuilder.missing(missing); } - return new FieldValueFactorFunction(field, boostFactor, modifier, missing, fieldData); + return fieldValueFactorFunctionBuilder; } @Override public String[] getNames() { return NAMES; } + + @Override + public FieldValueFactorFunctionBuilder getBuilderPrototype() { + return PROTOTYPE; + } } diff --git a/core/src/main/java/org/elasticsearch/index/query/functionscore/gauss/GaussDecayFunctionBuilder.java b/core/src/main/java/org/elasticsearch/index/query/functionscore/gauss/GaussDecayFunctionBuilder.java index b9d6708583d..621b22a583d 100644 --- a/core/src/main/java/org/elasticsearch/index/query/functionscore/gauss/GaussDecayFunctionBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/functionscore/gauss/GaussDecayFunctionBuilder.java @@ -20,12 
+20,30 @@ package org.elasticsearch.index.query.functionscore.gauss; +import org.apache.lucene.search.Explanation; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.index.query.functionscore.DecayFunction; import org.elasticsearch.index.query.functionscore.DecayFunctionBuilder; -public class GaussDecayFunctionBuilder extends DecayFunctionBuilder { +public class GaussDecayFunctionBuilder extends DecayFunctionBuilder { - public GaussDecayFunctionBuilder(String fieldName, Object origin, Object scale) { - super(fieldName, origin, scale); + private static final DecayFunction GAUSS_DECAY_FUNCTION = new GaussScoreFunction(); + + public GaussDecayFunctionBuilder(String fieldName, Object origin, Object scale, Object offset) { + super(fieldName, origin, scale, offset); + } + + public GaussDecayFunctionBuilder(String fieldName, Object origin, Object scale, Object offset, double decay) { + super(fieldName, origin, scale, offset, decay); + } + + private GaussDecayFunctionBuilder(String fieldName, BytesReference functionBytes) { + super(fieldName, functionBytes); + } + + @Override + protected GaussDecayFunctionBuilder createFunctionBuilder(String fieldName, BytesReference functionBytes) { + return new GaussDecayFunctionBuilder(fieldName, functionBytes); } @Override @@ -33,4 +51,43 @@ public class GaussDecayFunctionBuilder extends DecayFunctionBuilder { return GaussDecayFunctionParser.NAMES[0]; } + @Override + public DecayFunction getDecayFunction() { + return GAUSS_DECAY_FUNCTION; + } + + private static final class GaussScoreFunction implements DecayFunction { + + @Override + public double evaluate(double value, double scale) { + // note that we already computed scale^2 in processScale() so we do + // not need to square it here. 
+ return Math.exp(0.5 * Math.pow(value, 2.0) / scale); + } + + @Override + public Explanation explainFunction(String valueExpl, double value, double scale) { + return Explanation.match( + (float) evaluate(value, scale), + "exp(-0.5*pow(" + valueExpl + ",2.0)/" + -1 * scale + ")"); + } + + @Override + public double processScale(double scale, double decay) { + return 0.5 * Math.pow(scale, 2.0) / Math.log(decay); + } + + @Override + public int hashCode() { + return this.getClass().hashCode(); + } + + @Override + public boolean equals(Object obj) { + if (super.equals(obj)) { + return true; + } + return obj != null && getClass() != obj.getClass(); + } + } } diff --git a/core/src/main/java/org/elasticsearch/index/query/functionscore/gauss/GaussDecayFunctionParser.java b/core/src/main/java/org/elasticsearch/index/query/functionscore/gauss/GaussDecayFunctionParser.java index 614050a8fbe..6304a62b8f8 100644 --- a/core/src/main/java/org/elasticsearch/index/query/functionscore/gauss/GaussDecayFunctionParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/functionscore/gauss/GaussDecayFunctionParser.java @@ -19,45 +19,21 @@ package org.elasticsearch.index.query.functionscore.gauss; -import org.apache.lucene.search.Explanation; -import org.elasticsearch.index.query.functionscore.DecayFunction; import org.elasticsearch.index.query.functionscore.DecayFunctionParser; -public class GaussDecayFunctionParser extends DecayFunctionParser { +public class GaussDecayFunctionParser extends DecayFunctionParser { + + private static final GaussDecayFunctionBuilder PROTOTYPE = new GaussDecayFunctionBuilder("", "", "", ""); - static final DecayFunction decayFunction = new GaussScoreFunction(); public static final String[] NAMES = { "gauss" }; - @Override - public DecayFunction getDecayFunction() { - return decayFunction; - } - - final static class GaussScoreFunction implements DecayFunction { - - @Override - public double evaluate(double value, double scale) { - // note that we 
already computed scale^2 in processScale() so we do - // not need to square it here. - return Math.exp(0.5 * Math.pow(value, 2.0) / scale); - } - - @Override - public Explanation explainFunction(String valueExpl, double value, double scale) { - return Explanation.match( - (float) evaluate(value, scale), - "exp(-0.5*pow(" + valueExpl + ",2.0)/" + -1 * scale + ")"); - } - - @Override - public double processScale(double scale, double decay) { - return 0.5 * Math.pow(scale, 2.0) / Math.log(decay); - } - } - @Override public String[] getNames() { return NAMES; } + @Override + public GaussDecayFunctionBuilder getBuilderPrototype() { + return PROTOTYPE; + } } diff --git a/core/src/main/java/org/elasticsearch/index/query/functionscore/lin/LinearDecayFunctionBuilder.java b/core/src/main/java/org/elasticsearch/index/query/functionscore/lin/LinearDecayFunctionBuilder.java index dcb4eff5b5a..2e63aedd84b 100644 --- a/core/src/main/java/org/elasticsearch/index/query/functionscore/lin/LinearDecayFunctionBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/functionscore/lin/LinearDecayFunctionBuilder.java @@ -19,12 +19,25 @@ package org.elasticsearch.index.query.functionscore.lin; +import org.apache.lucene.search.Explanation; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.index.query.functionscore.DecayFunction; import org.elasticsearch.index.query.functionscore.DecayFunctionBuilder; -public class LinearDecayFunctionBuilder extends DecayFunctionBuilder { +public class LinearDecayFunctionBuilder extends DecayFunctionBuilder { - public LinearDecayFunctionBuilder(String fieldName, Object origin, Object scale) { - super(fieldName, origin, scale); + private static final DecayFunction LINEAR_DECAY_FUNCTION = new LinearDecayScoreFunction(); + + public LinearDecayFunctionBuilder(String fieldName, Object origin, Object scale, Object offset) { + super(fieldName, origin, scale, offset); + } + + public LinearDecayFunctionBuilder(String 
fieldName, Object origin, Object scale, Object offset, double decay) { + super(fieldName, origin, scale, offset, decay); + } + + private LinearDecayFunctionBuilder(String fieldName, BytesReference functionBytes) { + super(fieldName, functionBytes); } @Override @@ -32,4 +45,46 @@ public class LinearDecayFunctionBuilder extends DecayFunctionBuilder { return LinearDecayFunctionParser.NAMES[0]; } + @Override + protected LinearDecayFunctionBuilder createFunctionBuilder(String fieldName, BytesReference functionBytes) { + return new LinearDecayFunctionBuilder(fieldName, functionBytes); + } + + @Override + public DecayFunction getDecayFunction() { + return LINEAR_DECAY_FUNCTION; + } + + private static final class LinearDecayScoreFunction implements DecayFunction { + + @Override + public double evaluate(double value, double scale) { + return Math.max(0.0, (scale - value) / scale); + } + + @Override + public Explanation explainFunction(String valueExpl, double value, double scale) { + return Explanation.match( + (float) evaluate(value, scale), + "max(0.0, ((" + scale + " - " + valueExpl + ")/" + scale + ")"); + } + + @Override + public double processScale(double scale, double decay) { + return scale / (1.0 - decay); + } + + @Override + public int hashCode() { + return this.getClass().hashCode(); + } + + @Override + public boolean equals(Object obj) { + if (super.equals(obj)) { + return true; + } + return obj != null && getClass() != obj.getClass(); + } + } } diff --git a/core/src/main/java/org/elasticsearch/index/query/functionscore/lin/LinearDecayFunctionParser.java b/core/src/main/java/org/elasticsearch/index/query/functionscore/lin/LinearDecayFunctionParser.java index 215a7873ae3..569304c7eee 100644 --- a/core/src/main/java/org/elasticsearch/index/query/functionscore/lin/LinearDecayFunctionParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/functionscore/lin/LinearDecayFunctionParser.java @@ -19,44 +19,21 @@ package 
org.elasticsearch.index.query.functionscore.lin; -import org.apache.lucene.search.Explanation; -import org.elasticsearch.index.query.functionscore.DecayFunction; import org.elasticsearch.index.query.functionscore.DecayFunctionParser; -public class LinearDecayFunctionParser extends DecayFunctionParser { +public class LinearDecayFunctionParser extends DecayFunctionParser { public static final String[] NAMES = { "linear" }; + private static final LinearDecayFunctionBuilder PROTOTYPE = new LinearDecayFunctionBuilder("", "", "", ""); + @Override public String[] getNames() { return NAMES; } - static final DecayFunction decayFunction = new LinearDecayScoreFunction(); - @Override - public DecayFunction getDecayFunction() { - return decayFunction; - } - - final static class LinearDecayScoreFunction implements DecayFunction { - - @Override - public double evaluate(double value, double scale) { - return Math.max(0.0, (scale - value) / scale); - } - - @Override - public Explanation explainFunction(String valueExpl, double value, double scale) { - return Explanation.match( - (float) evaluate(value, scale), - "max(0.0, ((" + scale + " - " + valueExpl + ")/" + scale + ")"); - } - - @Override - public double processScale(double scale, double decay) { - return scale / (1.0 - decay); - } - + public LinearDecayFunctionBuilder getBuilderPrototype() { + return PROTOTYPE; } } diff --git a/core/src/main/java/org/elasticsearch/index/query/functionscore/random/RandomScoreFunctionBuilder.java b/core/src/main/java/org/elasticsearch/index/query/functionscore/random/RandomScoreFunctionBuilder.java index 22285f8cc67..e62aabc1972 100644 --- a/core/src/main/java/org/elasticsearch/index/query/functionscore/random/RandomScoreFunctionBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/functionscore/random/RandomScoreFunctionBuilder.java @@ -18,17 +18,27 @@ */ package org.elasticsearch.index.query.functionscore.random; +import org.elasticsearch.common.io.stream.StreamInput; +import 
org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.lucene.search.function.RandomScoreFunction; +import org.elasticsearch.common.lucene.search.function.ScoreFunction; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.fielddata.IndexFieldData; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; +import java.util.Objects; /** * A function that computes a random score for the matched documents */ -public class RandomScoreFunctionBuilder extends ScoreFunctionBuilder { +public class RandomScoreFunctionBuilder extends ScoreFunctionBuilder { - private Object seed = null; + private Integer seed; public RandomScoreFunctionBuilder() { } @@ -54,7 +64,7 @@ public class RandomScoreFunctionBuilder extends ScoreFunctionBuilder { * @see #seed(int) */ public RandomScoreFunctionBuilder seed(long seed) { - this.seed = seed; + this.seed = hash(seed); return this; } @@ -63,19 +73,64 @@ public class RandomScoreFunctionBuilder extends ScoreFunctionBuilder { * @see #seed(int) */ public RandomScoreFunctionBuilder seed(String seed) { - this.seed = seed; + if (seed == null) { + throw new IllegalArgumentException("random_score function: seed must not be null"); + } + this.seed = seed.hashCode(); return this; } + public Integer getSeed() { + return seed; + } + @Override public void doXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(getName()); - if (seed instanceof Number) { - builder.field("seed", ((Number)seed).longValue()); - } else if (seed != null) { - builder.field("seed", seed.toString()); + if (seed != null) { + builder.field("seed", seed); } builder.endObject(); } + @Override + protected RandomScoreFunctionBuilder 
doReadFrom(StreamInput in) throws IOException { + RandomScoreFunctionBuilder randomScoreFunctionBuilder = new RandomScoreFunctionBuilder(); + randomScoreFunctionBuilder.seed = in.readInt(); + return randomScoreFunctionBuilder; + } + + @Override + protected void doWriteTo(StreamOutput out) throws IOException { + out.writeInt(seed); + } + + @Override + protected boolean doEquals(RandomScoreFunctionBuilder functionBuilder) { + return Objects.equals(this.seed, functionBuilder.seed); + } + + @Override + protected int doHashCode() { + return Objects.hash(this.seed); + } + + @Override + protected ScoreFunction doToFunction(QueryShardContext context) { + final MappedFieldType fieldType = context.mapperService().smartNameFieldType("_uid"); + if (fieldType == null) { + // mapper could be null if we are on a shard with no docs yet, so this won't actually be used + return new RandomScoreFunction(); + } + //TODO find a way to not get the shard_id from the current search context? make it available in QueryShardContext? + //this currently causes NPE in FunctionScoreQueryBuilderTests#testToQuery + final ShardId shardId = SearchContext.current().indexShard().shardId(); + final int salt = (context.index().name().hashCode() << 10) | shardId.id(); + final IndexFieldData uidFieldData = context.getForField(fieldType); + return new RandomScoreFunction(this.seed == null ? 
hash(context.nowInMillis()) : seed, salt, uidFieldData); + } + + private static int hash(long value) { + return (int) (value ^ (value >>> 32)); + } } diff --git a/core/src/main/java/org/elasticsearch/index/query/functionscore/random/RandomScoreFunctionParser.java b/core/src/main/java/org/elasticsearch/index/query/functionscore/random/RandomScoreFunctionParser.java index 621a087c9e0..9b062d42c1c 100644 --- a/core/src/main/java/org/elasticsearch/index/query/functionscore/random/RandomScoreFunctionParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/functionscore/random/RandomScoreFunctionParser.java @@ -22,23 +22,19 @@ package org.elasticsearch.index.query.functionscore.random; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.lucene.search.function.RandomScoreFunction; -import org.elasticsearch.common.lucene.search.function.ScoreFunction; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.fielddata.IndexFieldData; -import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.common.ParsingException; import org.elasticsearch.index.query.functionscore.ScoreFunctionParser; -import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; -public class RandomScoreFunctionParser implements ScoreFunctionParser { +public class RandomScoreFunctionParser implements ScoreFunctionParser { public static String[] NAMES = { "random_score", "randomScore" }; + private static RandomScoreFunctionBuilder PROTOTYPE = new RandomScoreFunctionBuilder(); + @Inject public RandomScoreFunctionParser() { } @@ -49,10 +45,8 @@ public class RandomScoreFunctionParser implements ScoreFunctionParser { } @Override - public ScoreFunction parse(QueryParseContext parseContext, XContentParser parser) throws IOException, ParsingException { - - int seed = -1; - + public 
RandomScoreFunctionBuilder fromXContent(QueryParseContext parseContext, XContentParser parser) throws IOException, ParsingException { + RandomScoreFunctionBuilder randomScoreFunctionBuilder = new RandomScoreFunctionBuilder(); String currentFieldName = null; XContentParser.Token token; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { @@ -62,42 +56,29 @@ public class RandomScoreFunctionParser implements ScoreFunctionParser { if ("seed".equals(currentFieldName)) { if (token == XContentParser.Token.VALUE_NUMBER) { if (parser.numberType() == XContentParser.NumberType.INT) { - seed = parser.intValue(); + randomScoreFunctionBuilder.seed(parser.intValue()); } else if (parser.numberType() == XContentParser.NumberType.LONG) { - seed = hash(parser.longValue()); + randomScoreFunctionBuilder.seed(parser.longValue()); } else { - throw new ParsingException(parseContext, "random_score seed must be an int, long or string, not '" + throw new ParsingException(parser.getTokenLocation(), "random_score seed must be an int, long or string, not '" + token.toString() + "'"); } } else if (token == XContentParser.Token.VALUE_STRING) { - seed = parser.text().hashCode(); + randomScoreFunctionBuilder.seed(parser.text()); } else { - throw new ParsingException(parseContext, "random_score seed must be an int/long or string, not '" + throw new ParsingException(parser.getTokenLocation(), "random_score seed must be an int/long or string, not '" + token.toString() + "'"); } } else { - throw new ParsingException(parseContext, NAMES[0] + " query does not support [" + currentFieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), NAMES[0] + " query does not support [" + currentFieldName + "]"); } } } - - final MappedFieldType fieldType = SearchContext.current().mapperService().smartNameFieldType("_uid"); - if (fieldType == null) { - // mapper could be null if we are on a shard with no docs yet, so this won't actually be used - return new RandomScoreFunction(); - 
} - - if (seed == -1) { - seed = hash(parseContext.nowInMillis()); - } - final ShardId shardId = SearchContext.current().indexShard().shardId(); - final int salt = (shardId.index().name().hashCode() << 10) | shardId.id(); - final IndexFieldData uidFieldData = SearchContext.current().fieldData().getForField(fieldType); - - return new RandomScoreFunction(seed, salt, uidFieldData); + return randomScoreFunctionBuilder; } - private static final int hash(long value) { - return (int) (value ^ (value >>> 32)); + @Override + public RandomScoreFunctionBuilder getBuilderPrototype() { + return PROTOTYPE; } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/index/query/functionscore/script/ScriptScoreFunctionBuilder.java b/core/src/main/java/org/elasticsearch/index/query/functionscore/script/ScriptScoreFunctionBuilder.java index 023d8a6e5e1..266e75f6ce2 100644 --- a/core/src/main/java/org/elasticsearch/index/query/functionscore/script/ScriptScoreFunctionBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/functionscore/script/ScriptScoreFunctionBuilder.java @@ -19,20 +19,27 @@ package org.elasticsearch.index.query.functionscore.script; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.lucene.search.function.ScoreFunction; +import org.elasticsearch.common.lucene.search.function.ScriptScoreFunction; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.query.QueryShardContext; +import org.elasticsearch.index.query.QueryShardException; import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder; import org.elasticsearch.script.Script; import org.elasticsearch.script.Script.ScriptField; +import org.elasticsearch.script.ScriptContext; +import org.elasticsearch.script.SearchScript; import java.io.IOException; -import java.util.HashMap; -import java.util.Map; +import java.util.Objects; /** * A 
function that uses a script to compute or influence the score of documents * that match with the inner query or filter. */ -public class ScriptScoreFunctionBuilder extends ScoreFunctionBuilder { +public class ScriptScoreFunctionBuilder extends ScoreFunctionBuilder { private final Script script; @@ -43,6 +50,10 @@ public class ScriptScoreFunctionBuilder extends ScoreFunctionBuilder { this.script = script; } + public Script getScript() { + return this.script; + } + @Override public void doXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(getName()); @@ -54,4 +65,34 @@ public class ScriptScoreFunctionBuilder extends ScoreFunctionBuilder { public String getName() { return ScriptScoreFunctionParser.NAMES[0]; } + + @Override + protected void doWriteTo(StreamOutput out) throws IOException { + script.writeTo(out); + } + + @Override + protected ScriptScoreFunctionBuilder doReadFrom(StreamInput in) throws IOException { + return new ScriptScoreFunctionBuilder(Script.readScript(in)); + } + + @Override + protected boolean doEquals(ScriptScoreFunctionBuilder functionBuilder) { + return Objects.equals(this.script, functionBuilder.script); + } + + @Override + protected int doHashCode() { + return Objects.hash(this.script); + } + + @Override + protected ScoreFunction doToFunction(QueryShardContext context) { + try { + SearchScript searchScript = context.scriptService().search(context.lookup(), script, ScriptContext.Standard.SEARCH); + return new ScriptScoreFunction(script, searchScript); + } catch (Exception e) { + throw new QueryShardException(context, "script_score: the script could not be loaded", e); + } + } } \ No newline at end of file diff --git a/core/src/main/java/org/elasticsearch/index/query/functionscore/script/ScriptScoreFunctionParser.java b/core/src/main/java/org/elasticsearch/index/query/functionscore/script/ScriptScoreFunctionParser.java index be773655460..d0dbcccb625 100644 --- 
a/core/src/main/java/org/elasticsearch/index/query/functionscore/script/ScriptScoreFunctionParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/functionscore/script/ScriptScoreFunctionParser.java @@ -21,19 +21,14 @@ package org.elasticsearch.index.query.functionscore.script; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.lucene.search.function.ScoreFunction; -import org.elasticsearch.common.lucene.search.function.ScriptScoreFunction; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.common.ParsingException; import org.elasticsearch.index.query.functionscore.ScoreFunctionParser; import org.elasticsearch.script.Script; import org.elasticsearch.script.Script.ScriptField; -import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptParameterParser; import org.elasticsearch.script.ScriptParameterParser.ScriptParameterValue; -import org.elasticsearch.script.SearchScript; import java.io.IOException; import java.util.HashMap; @@ -42,13 +37,11 @@ import java.util.Map; /** * */ -public class ScriptScoreFunctionParser implements ScoreFunctionParser { +public class ScriptScoreFunctionParser implements ScoreFunctionParser { public static String[] NAMES = { "script_score", "scriptScore" }; - @Inject - public ScriptScoreFunctionParser() { - } + private static final ScriptScoreFunctionBuilder PROTOTYPE = new ScriptScoreFunctionBuilder(new Script("")); @Override public String[] getNames() { @@ -56,7 +49,7 @@ public class ScriptScoreFunctionParser implements ScoreFunctionParser { } @Override - public ScoreFunction parse(QueryParseContext parseContext, XContentParser parser) throws IOException, ParsingException { + public ScriptScoreFunctionBuilder fromXContent(QueryParseContext parseContext, XContentParser parser) throws IOException, ParsingException { ScriptParameterParser scriptParameterParser = new 
ScriptParameterParser(); Script script = null; Map vars = null; @@ -71,11 +64,11 @@ public class ScriptScoreFunctionParser implements ScoreFunctionParser { } else if ("params".equals(currentFieldName)) { // TODO remove in 3.0 (here to support old script APIs) vars = parser.map(); } else { - throw new ParsingException(parseContext, NAMES[0] + " query does not support [" + currentFieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), NAMES[0] + " query does not support [" + currentFieldName + "]"); } } else if (token.isValue()) { if (!scriptParameterParser.token(currentFieldName, token, parser, parseContext.parseFieldMatcher())) { - throw new ParsingException(parseContext, NAMES[0] + " query does not support [" + currentFieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), NAMES[0] + " query does not support [" + currentFieldName + "]"); } } } @@ -89,19 +82,18 @@ public class ScriptScoreFunctionParser implements ScoreFunctionParser { script = new Script(scriptValue.script(), scriptValue.scriptType(), scriptParameterParser.lang(), vars); } } else if (vars != null) { - throw new ParsingException(parseContext, "script params must be specified inside script object"); + throw new ParsingException(parser.getTokenLocation(), "script params must be specified inside script object"); } if (script == null) { - throw new ParsingException(parseContext, NAMES[0] + " requires 'script' field"); + throw new ParsingException(parser.getTokenLocation(), NAMES[0] + " requires 'script' field"); } - SearchScript searchScript; - try { - searchScript = parseContext.scriptService().search(parseContext.lookup(), script, ScriptContext.Standard.SEARCH); - return new ScriptScoreFunction(script, searchScript); - } catch (Exception e) { - throw new ParsingException(parseContext, NAMES[0] + " the script could not be loaded", e); - } + return new ScriptScoreFunctionBuilder(script); + } + + @Override + public ScriptScoreFunctionBuilder getBuilderPrototype() { + 
return PROTOTYPE; } } \ No newline at end of file diff --git a/core/src/main/java/org/elasticsearch/index/query/functionscore/weight/WeightBuilder.java b/core/src/main/java/org/elasticsearch/index/query/functionscore/weight/WeightBuilder.java index 5b6284326a6..a49eb19e323 100644 --- a/core/src/main/java/org/elasticsearch/index/query/functionscore/weight/WeightBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/functionscore/weight/WeightBuilder.java @@ -19,8 +19,11 @@ package org.elasticsearch.index.query.functionscore.weight; -import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.lucene.search.function.ScoreFunction; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder; import java.io.IOException; @@ -28,7 +31,7 @@ import java.io.IOException; /** * A query that multiplies the weight to the score. 
*/ -public class WeightBuilder extends ScoreFunctionBuilder { +public class WeightBuilder extends ScoreFunctionBuilder { @Override public String getName() { @@ -38,4 +41,30 @@ public class WeightBuilder extends ScoreFunctionBuilder { @Override protected void doXContent(XContentBuilder builder, Params params) throws IOException { } + + @Override + protected void doWriteTo(StreamOutput out) throws IOException { + + } + + @Override + protected WeightBuilder doReadFrom(StreamInput in) throws IOException { + return new WeightBuilder(); + } + + @Override + protected boolean doEquals(WeightBuilder functionBuilder) { + return true; + } + + @Override + protected int doHashCode() { + return 0; + } + + @Override + protected ScoreFunction doToFunction(QueryShardContext context) throws IOException { + //nothing to do here, weight will be applied by the parent class, no score function + return null; + } } diff --git a/core/src/main/java/org/elasticsearch/index/query/support/BaseInnerHitBuilder.java b/core/src/main/java/org/elasticsearch/index/query/support/BaseInnerHitBuilder.java deleted file mode 100644 index 48a2f59924e..00000000000 --- a/core/src/main/java/org/elasticsearch/index/query/support/BaseInnerHitBuilder.java +++ /dev/null @@ -1,376 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.index.query.support; - -import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.xcontent.ToXContent; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.script.Script; -import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.search.highlight.HighlightBuilder; -import org.elasticsearch.search.sort.SortBuilder; -import org.elasticsearch.search.sort.SortOrder; - -import java.io.IOException; -import java.util.Map; - -/** - */ -@SuppressWarnings("unchecked") -public abstract class BaseInnerHitBuilder implements ToXContent { - - protected SearchSourceBuilder sourceBuilder; - - /** - * The index to start to return hits from. Defaults to 0. - */ - public T setFrom(int from) { - sourceBuilder().from(from); - return (T) this; - } - - - /** - * The number of search hits to return. Defaults to 10. - */ - public T setSize(int size) { - sourceBuilder().size(size); - return (T) this; - } - - /** - * Applies when sorting, and controls if scores will be tracked as well. Defaults to - * false. - */ - public T setTrackScores(boolean trackScores) { - sourceBuilder().trackScores(trackScores); - return (T) this; - } - - /** - * Should each {@link org.elasticsearch.search.SearchHit} be returned with an - * explanation of the hit (ranking). - */ - public T setExplain(boolean explain) { - sourceBuilder().explain(explain); - return (T) this; - } - - /** - * Should each {@link org.elasticsearch.search.SearchHit} be returned with its - * version. - */ - public T setVersion(boolean version) { - sourceBuilder().version(version); - return (T) this; - } - - /** - * Add a stored field to be loaded and returned with the inner hit. 
- */ - public T field(String name) { - sourceBuilder().field(name); - return (T) this; - } - - /** - * Sets no fields to be loaded, resulting in only id and type to be returned per field. - */ - public T setNoFields() { - sourceBuilder().noFields(); - return (T) this; - } - - /** - * Indicates whether the response should contain the stored _source for every hit - */ - public T setFetchSource(boolean fetch) { - sourceBuilder().fetchSource(fetch); - return (T) this; - } - - /** - * Indicate that _source should be returned with every hit, with an "include" and/or "exclude" set which can include simple wildcard - * elements. - * - * @param include An optional include (optionally wildcarded) pattern to filter the returned _source - * @param exclude An optional exclude (optionally wildcarded) pattern to filter the returned _source - */ - public T setFetchSource(@Nullable String include, @Nullable String exclude) { - sourceBuilder().fetchSource(include, exclude); - return (T) this; - } - - /** - * Indicate that _source should be returned with every hit, with an "include" and/or "exclude" set which can include simple wildcard - * elements. - * - * @param includes An optional list of include (optionally wildcarded) pattern to filter the returned _source - * @param excludes An optional list of exclude (optionally wildcarded) pattern to filter the returned _source - */ - public T setFetchSource(@Nullable String[] includes, @Nullable String[] excludes) { - sourceBuilder().fetchSource(includes, excludes); - return (T) this; - } - - /** - * Adds a field data based field to load and return. The field does not have to be stored, - * but its recommended to use non analyzed or numeric fields. - * - * @param name The field to get from the field data cache - */ - public T addFieldDataField(String name) { - sourceBuilder().fieldDataField(name); - return (T) this; - } - - /** - * Adds a script based field to load and return. 
The field does not have to be stored, - * but its recommended to use non analyzed or numeric fields. - * - * @param name The name that will represent this value in the return hit - * @param script The script to use - */ - public T addScriptField(String name, Script script) { - sourceBuilder().scriptField(name, script); - return (T) this; - } - - /** - * Adds a sort against the given field name and the sort ordering. - * - * @param field The name of the field - * @param order The sort ordering - */ - public T addSort(String field, SortOrder order) { - sourceBuilder().sort(field, order); - return (T) this; - } - - /** - * Adds a generic sort builder. - * - * @see org.elasticsearch.search.sort.SortBuilders - */ - public T addSort(SortBuilder sort) { - sourceBuilder().sort(sort); - return (T) this; - } - - public HighlightBuilder highlightBuilder() { - return sourceBuilder().highlighter(); - } - - /** - * Adds a field to be highlighted with default fragment size of 100 characters, and - * default number of fragments of 5. - * - * @param name The field to highlight - */ - public T addHighlightedField(String name) { - highlightBuilder().field(name); - return (T) this; - } - - - /** - * Adds a field to be highlighted with a provided fragment size (in characters), and - * default number of fragments of 5. - * - * @param name The field to highlight - * @param fragmentSize The size of a fragment in characters - */ - public T addHighlightedField(String name, int fragmentSize) { - highlightBuilder().field(name, fragmentSize); - return (T) this; - } - - /** - * Adds a field to be highlighted with a provided fragment size (in characters), and - * a provided (maximum) number of fragments. 
- * - * @param name The field to highlight - * @param fragmentSize The size of a fragment in characters - * @param numberOfFragments The (maximum) number of fragments - */ - public T addHighlightedField(String name, int fragmentSize, int numberOfFragments) { - highlightBuilder().field(name, fragmentSize, numberOfFragments); - return (T) this; - } - - /** - * Adds a field to be highlighted with a provided fragment size (in characters), - * a provided (maximum) number of fragments and an offset for the highlight. - * - * @param name The field to highlight - * @param fragmentSize The size of a fragment in characters - * @param numberOfFragments The (maximum) number of fragments - */ - public T addHighlightedField(String name, int fragmentSize, int numberOfFragments, - int fragmentOffset) { - highlightBuilder().field(name, fragmentSize, numberOfFragments, fragmentOffset); - return (T) this; - } - - /** - * Adds a highlighted field. - */ - public T addHighlightedField(HighlightBuilder.Field field) { - highlightBuilder().field(field); - return (T) this; - } - - /** - * Set a tag scheme that encapsulates a built in pre and post tags. The allows schemes - * are styled and default. 
- * - * @param schemaName The tag scheme name - */ - public T setHighlighterTagsSchema(String schemaName) { - highlightBuilder().tagsSchema(schemaName); - return (T) this; - } - - public T setHighlighterFragmentSize(Integer fragmentSize) { - highlightBuilder().fragmentSize(fragmentSize); - return (T) this; - } - - public T setHighlighterNumOfFragments(Integer numOfFragments) { - highlightBuilder().numOfFragments(numOfFragments); - return (T) this; - } - - public T setHighlighterFilter(Boolean highlightFilter) { - highlightBuilder().highlightFilter(highlightFilter); - return (T) this; - } - - /** - * The encoder to set for highlighting - */ - public T setHighlighterEncoder(String encoder) { - highlightBuilder().encoder(encoder); - return (T) this; - } - - /** - * Explicitly set the pre tags that will be used for highlighting. - */ - public T setHighlighterPreTags(String... preTags) { - highlightBuilder().preTags(preTags); - return (T) this; - } - - /** - * Explicitly set the post tags that will be used for highlighting. - */ - public T setHighlighterPostTags(String... postTags) { - highlightBuilder().postTags(postTags); - return (T) this; - } - - /** - * The order of fragments per field. By default, ordered by the order in the - * highlighted text. Can be score, which then it will be ordered - * by score of the fragments. - */ - public T setHighlighterOrder(String order) { - highlightBuilder().order(order); - return (T) this; - } - - public T setHighlighterRequireFieldMatch(boolean requireFieldMatch) { - highlightBuilder().requireFieldMatch(requireFieldMatch); - return (T) this; - } - - public T setHighlighterBoundaryMaxScan(Integer boundaryMaxScan) { - highlightBuilder().boundaryMaxScan(boundaryMaxScan); - return (T) this; - } - - public T setHighlighterBoundaryChars(char[] boundaryChars) { - highlightBuilder().boundaryChars(boundaryChars); - return (T) this; - } - - /** - * The highlighter type to use. 
- */ - public T setHighlighterType(String type) { - highlightBuilder().highlighterType(type); - return (T) this; - } - - public T setHighlighterFragmenter(String fragmenter) { - highlightBuilder().fragmenter(fragmenter); - return (T) this; - } - - /** - * Sets a query to be used for highlighting all fields instead of the search query. - */ - public T setHighlighterQuery(QueryBuilder highlightQuery) { - highlightBuilder().highlightQuery(highlightQuery); - return (T) this; - } - - /** - * Sets the size of the fragment to return from the beginning of the field if there are no matches to - * highlight and the field doesn't also define noMatchSize. - * @param noMatchSize integer to set or null to leave out of request. default is null. - * @return this builder for chaining - */ - public T setHighlighterNoMatchSize(Integer noMatchSize) { - highlightBuilder().noMatchSize(noMatchSize); - return (T) this; - } - - /** - * Sets the maximum number of phrases the fvh will consider if the field doesn't also define phraseLimit. 
- */ - public T setHighlighterPhraseLimit(Integer phraseLimit) { - highlightBuilder().phraseLimit(phraseLimit); - return (T) this; - } - - public T setHighlighterOptions(Map options) { - highlightBuilder().options(options); - return (T) this; - } - - protected SearchSourceBuilder sourceBuilder() { - if (sourceBuilder == null) { - sourceBuilder = new SearchSourceBuilder(); - } - return sourceBuilder; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - if (sourceBuilder != null) { - sourceBuilder.innerToXContent(builder, params); - } - return builder; - } - -} diff --git a/core/src/main/java/org/elasticsearch/index/query/support/InnerHitsQueryParserHelper.java b/core/src/main/java/org/elasticsearch/index/query/support/InnerHitsQueryParserHelper.java index fd6fc5af706..02b21a1c962 100644 --- a/core/src/main/java/org/elasticsearch/index/query/support/InnerHitsQueryParserHelper.java +++ b/core/src/main/java/org/elasticsearch/index/query/support/InnerHitsQueryParserHelper.java @@ -21,6 +21,8 @@ package org.elasticsearch.index.query.support; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.query.QueryShardContext; +import org.elasticsearch.index.query.QueryShardException; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.common.ParsingException; import org.elasticsearch.search.fetch.fielddata.FieldDataFieldsParseElement; @@ -51,13 +53,12 @@ public class InnerHitsQueryParserHelper { this.fieldDataFieldsParseElement = fieldDataFieldsParseElement; } - public InnerHitsSubSearchContext parse(QueryParseContext parserContext) throws IOException, ParsingException { + public InnerHitsSubSearchContext parse(XContentParser parser) throws IOException { String fieldName = null; XContentParser.Token token; String innerHitName = null; SubSearchContext subSearchContext = new 
SubSearchContext(SearchContext.current()); try { - XContentParser parser = parserContext.parser(); while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { fieldName = parser.currentName(); @@ -72,7 +73,7 @@ public class InnerHitsQueryParserHelper { } } } catch (Exception e) { - throw new ParsingException(parserContext, "Failed to parse [_inner_hits]", e); + throw new IOException("Failed to parse [_inner_hits]"); } return new InnerHitsSubSearchContext(innerHitName, subSearchContext); } diff --git a/core/src/main/java/org/elasticsearch/index/query/support/NestedInnerQueryParseSupport.java b/core/src/main/java/org/elasticsearch/index/query/support/NestedInnerQueryParseSupport.java index 761341f588b..b47aa2bb9ec 100644 --- a/core/src/main/java/org/elasticsearch/index/query/support/NestedInnerQueryParseSupport.java +++ b/core/src/main/java/org/elasticsearch/index/query/support/NestedInnerQueryParseSupport.java @@ -28,8 +28,9 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.mapper.object.ObjectMapper; +import org.elasticsearch.index.query.QueryShardContext; +import org.elasticsearch.index.query.QueryShardException; import org.elasticsearch.index.query.QueryParseContext; -import org.elasticsearch.common.ParsingException; import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; @@ -41,6 +42,7 @@ import java.io.IOException; */ public class NestedInnerQueryParseSupport { + protected final QueryShardContext shardContext; protected final QueryParseContext parseContext; private BytesReference source; @@ -60,19 +62,22 @@ public class NestedInnerQueryParseSupport { private ObjectMapper parentObjectMapper; public NestedInnerQueryParseSupport(XContentParser parser, SearchContext searchContext) { - parseContext = 
searchContext.queryParserService().getParseContext(); - parseContext.reset(parser); + parseContext = searchContext.queryParserService().getShardContext().parseContext(); + shardContext = searchContext.queryParserService().getShardContext(); + shardContext.reset(parser); + } - public NestedInnerQueryParseSupport(QueryParseContext parseContext) { - this.parseContext = parseContext; + public NestedInnerQueryParseSupport(QueryShardContext context) { + this.parseContext = context.parseContext(); + this.shardContext = context; } public void query() throws IOException { if (path != null) { setPathLevel(); try { - innerQuery = parseContext.parseInnerQuery(); + innerQuery = parseContext.parseInnerQueryBuilder().toQuery(this.shardContext); } finally { resetPathLevel(); } @@ -87,7 +92,7 @@ public class NestedInnerQueryParseSupport { if (path != null) { setPathLevel(); try { - innerFilter = parseContext.parseInnerFilter(); + innerFilter = parseContext.parseInnerQueryBuilder().toFilter(this.shardContext); } finally { resetPathLevel(); } @@ -103,10 +108,10 @@ public class NestedInnerQueryParseSupport { return innerQuery; } else { if (path == null) { - throw new ParsingException(parseContext, "[nested] requires 'path' field"); + throw new QueryShardException(shardContext, "[nested] requires 'path' field"); } if (!queryFound) { - throw new ParsingException(parseContext, "[nested] requires either 'query' or 'filter' field"); + throw new QueryShardException(shardContext, "[nested] requires either 'query' or 'filter' field"); } XContentParser old = parseContext.parser(); @@ -115,7 +120,7 @@ public class NestedInnerQueryParseSupport { parseContext.parser(innerParser); setPathLevel(); try { - innerQuery = parseContext.parseInnerQuery(); + innerQuery = parseContext.parseInnerQueryBuilder().toQuery(this.shardContext); } finally { resetPathLevel(); } @@ -132,10 +137,10 @@ public class NestedInnerQueryParseSupport { return innerFilter; } else { if (path == null) { - throw new 
ParsingException(parseContext, "[nested] requires 'path' field"); + throw new QueryShardException(shardContext, "[nested] requires 'path' field"); } if (!filterFound) { - throw new ParsingException(parseContext, "[nested] requires either 'query' or 'filter' field"); + throw new QueryShardException(shardContext, "[nested] requires either 'query' or 'filter' field"); } setPathLevel(); @@ -143,7 +148,7 @@ public class NestedInnerQueryParseSupport { try { XContentParser innerParser = XContentHelper.createParser(source); parseContext.parser(innerParser); - innerFilter = parseContext.parseInnerFilter(); + innerFilter = parseContext.parseInnerQueryBuilder().toFilter(this.shardContext); filterParsed = true; return innerFilter; } finally { @@ -155,12 +160,12 @@ public class NestedInnerQueryParseSupport { public void setPath(String path) { this.path = path; - nestedObjectMapper = parseContext.getObjectMapper(path); + nestedObjectMapper = shardContext.getObjectMapper(path); if (nestedObjectMapper == null) { - throw new ParsingException(parseContext, "[nested] failed to find nested object under path [" + path + "]"); + throw new QueryShardException(shardContext, "[nested] failed to find nested object under path [" + path + "]"); } if (!nestedObjectMapper.nested().isNested()) { - throw new ParsingException(parseContext, "[nested] nested object under path [" + path + "] is not of nested type"); + throw new QueryShardException(shardContext, "[nested] nested object under path [" + path + "] is not of nested type"); } } @@ -185,18 +190,18 @@ public class NestedInnerQueryParseSupport { } private void setPathLevel() { - ObjectMapper objectMapper = parseContext.nestedScope().getObjectMapper(); + ObjectMapper objectMapper = shardContext.nestedScope().getObjectMapper(); if (objectMapper == null) { - parentFilter = parseContext.bitsetFilter(Queries.newNonNestedFilter()); + parentFilter = shardContext.bitsetFilter(Queries.newNonNestedFilter()); } else { - parentFilter = 
parseContext.bitsetFilter(objectMapper.nestedTypeFilter()); + parentFilter = shardContext.bitsetFilter(objectMapper.nestedTypeFilter()); } childFilter = nestedObjectMapper.nestedTypeFilter(); - parentObjectMapper = parseContext.nestedScope().nextLevel(nestedObjectMapper); + parentObjectMapper = shardContext.nestedScope().nextLevel(nestedObjectMapper); } private void resetPathLevel() { - parseContext.nestedScope().previousLevel(); + shardContext.nestedScope().previousLevel(); } } diff --git a/core/src/main/java/org/elasticsearch/index/query/support/QueryInnerHits.java b/core/src/main/java/org/elasticsearch/index/query/support/QueryInnerHits.java new file mode 100644 index 00000000000..fc9b1541001 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/index/query/support/QueryInnerHits.java @@ -0,0 +1,110 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.index.query.support; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.support.ToXContentToBytes; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.*; +import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.search.fetch.innerhits.InnerHitsBuilder; + +import java.io.IOException; + +/** + */ +public class QueryInnerHits extends ToXContentToBytes implements Writeable { + private final BytesReference queryInnerHitsSearchSource; + + public QueryInnerHits(StreamInput input) throws IOException { + queryInnerHitsSearchSource = input.readBytesReference(); + } + + public QueryInnerHits(XContentParser parser) throws IOException { + BytesStreamOutput out = new BytesStreamOutput(); + try (XContentBuilder builder = XContentFactory.cborBuilder(out)) { + builder.copyCurrentStructure(parser); + queryInnerHitsSearchSource = builder.bytes(); + } + } + + public QueryInnerHits() { + this(null, null); + } + + public QueryInnerHits(String name, InnerHitsBuilder.InnerHit innerHit) { + BytesStreamOutput out = new BytesStreamOutput(); + try (XContentBuilder builder = XContentFactory.cborBuilder(out)) { + builder.startObject(); + if (name != null) { + builder.field("name", name); + } + if (innerHit != null) { + innerHit.toXContent(builder, ToXContent.EMPTY_PARAMS); + } + builder.endObject(); + this.queryInnerHitsSearchSource = builder.bytes(); + } catch (IOException e) { + throw new ElasticsearchException("failed to build xcontent", e); + } + } + + @Override + public QueryInnerHits readFrom(StreamInput in) throws IOException { + return new QueryInnerHits(in); + } + + @Override + public XContentBuilder 
toXContent(XContentBuilder builder, Params params) throws IOException { + builder.field("inner_hits"); + try (XContentParser parser = XContentType.CBOR.xContent().createParser(queryInnerHitsSearchSource)) { + builder.copyCurrentStructure(parser); + } + return builder; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeBytesReference(queryInnerHitsSearchSource); + } + + public XContentParser getXcontentParser() throws IOException { + return XContentType.CBOR.xContent().createParser(queryInnerHitsSearchSource); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + + QueryInnerHits that = (QueryInnerHits) o; + + return queryInnerHitsSearchSource.equals(that.queryInnerHitsSearchSource); + + } + + @Override + public int hashCode() { + return queryInnerHitsSearchSource.hashCode(); + } +} diff --git a/core/src/main/java/org/elasticsearch/index/query/support/QueryParsers.java b/core/src/main/java/org/elasticsearch/index/query/support/QueryParsers.java index 1a12c74a318..a500393c160 100644 --- a/core/src/main/java/org/elasticsearch/index/query/support/QueryParsers.java +++ b/core/src/main/java/org/elasticsearch/index/query/support/QueryParsers.java @@ -29,12 +29,12 @@ import org.elasticsearch.common.ParseFieldMatcher; */ public final class QueryParsers { - private static final ParseField CONSTANT_SCORE = new ParseField("constant_score", "constant_score_auto", "constant_score_filter"); - private static final ParseField SCORING_BOOLEAN = new ParseField("scoring_boolean"); - private static final ParseField CONSTANT_SCORE_BOOLEAN = new ParseField("constant_score_boolean"); - private static final ParseField TOP_TERMS = new ParseField("top_terms_"); - private static final ParseField TOP_TERMS_BOOST = new ParseField("top_terms_boost_"); - private static final ParseField TOP_TERMS_BLENDED_FREQS = new ParseField("top_terms_blended_freqs_"); + public 
static final ParseField CONSTANT_SCORE = new ParseField("constant_score", "constant_score_auto", "constant_score_filter"); + public static final ParseField SCORING_BOOLEAN = new ParseField("scoring_boolean"); + public static final ParseField CONSTANT_SCORE_BOOLEAN = new ParseField("constant_score_boolean"); + public static final ParseField TOP_TERMS = new ParseField("top_terms_"); + public static final ParseField TOP_TERMS_BOOST = new ParseField("top_terms_boost_"); + public static final ParseField TOP_TERMS_BLENDED_FREQS = new ParseField("top_terms_blended_freqs_"); private QueryParsers() { diff --git a/core/src/main/java/org/elasticsearch/index/query/support/XContentStructure.java b/core/src/main/java/org/elasticsearch/index/query/support/XContentStructure.java deleted file mode 100644 index 37716d12154..00000000000 --- a/core/src/main/java/org/elasticsearch/index/query/support/XContentStructure.java +++ /dev/null @@ -1,136 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.query.support; - -import org.apache.lucene.search.Query; -import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.query.QueryParseContext; - -import java.io.IOException; - -/** - * XContentStructure is a class used to capture a subset of query, to be parsed - * at a later time when more information (in this case, types) is available. - * Note that using this class requires copying the parser's data, which will - * result in additional overhead versus parsing the inner query/filter - * immediately, however, the extra overhead means that the type not be - * extracted prior to query parsing (in the case of unordered JSON). - */ -public abstract class XContentStructure { - - private final QueryParseContext parseContext; - private BytesReference innerBytes; - - /** - * Create a new XContentStructure for the current parsing context. - */ - public XContentStructure(QueryParseContext queryParseContext) { - this.parseContext = queryParseContext; - } - - /** - * "Freeze" the parsing content, which means copying the current parser's - * structure into an internal {@link BytesReference} to be parsed later. - * @return the original XContentStructure object - */ - public XContentStructure freeze() throws IOException { - this.bytes(XContentFactory.smileBuilder().copyCurrentStructure(parseContext.parser()).bytes()); - return this; - } - - /** - * Set the bytes to be used for parsing - */ - public void bytes(BytesReference innerBytes) { - this.innerBytes = innerBytes; - } - - /** - * Return the bytes that are going to be used for parsing - */ - public BytesReference bytes() { - return this.innerBytes; - } - - /** - * Use the captured bytes to parse the inner query using the specified - * types. 
The original QueryParseContext's parser is switched during this - * parsing, so this method is NOT thread-safe. - * @param types types to be used during the inner query parsing - * @return {@link Query} parsed from the bytes captured in {@code freeze()} - */ - public Query asQuery(String... types) throws IOException { - BytesReference br = this.bytes(); - assert br != null : "innerBytes must be set with .bytes(bytes) or .freeze() before parsing"; - XContentParser innerParser = XContentHelper.createParser(br); - String[] origTypes = QueryParseContext.setTypesWithPrevious(types); - XContentParser old = parseContext.parser(); - parseContext.parser(innerParser); - try { - return parseContext.parseInnerQuery(); - } finally { - parseContext.parser(old); - QueryParseContext.setTypes(origTypes); - } - } - - /** - * InnerQuery is an extension of {@code XContentStructure} that eagerly - * parses the query in a streaming manner if the types are available at - * construction time. - */ - public static class InnerQuery extends XContentStructure { - private Query query = null; - private boolean queryParsed = false; - public InnerQuery(QueryParseContext parseContext1, @Nullable String... types) throws IOException { - super(parseContext1); - if (types != null) { - String[] origTypes = QueryParseContext.setTypesWithPrevious(types); - try { - query = parseContext1.parseInnerQuery(); - queryParsed = true; - } finally { - QueryParseContext.setTypes(origTypes); - } - } else { - BytesReference innerBytes = XContentFactory.smileBuilder().copyCurrentStructure(parseContext1.parser()).bytes(); - super.bytes(innerBytes); - } - } - - /** - * Return the query represented by the XContentStructure object, - * returning the cached Query if it has already been parsed. - * @param types types to be used during the inner query parsing - */ - @Override - public Query asQuery(String... 
types) throws IOException { - if (!queryParsed) { // query can be null - this.query = super.asQuery(types); - } - return this.query; - } - } - -} diff --git a/core/src/main/java/org/elasticsearch/index/search/MatchQuery.java b/core/src/main/java/org/elasticsearch/index/search/MatchQuery.java index fb5fff837a8..f0cb1d4f294 100644 --- a/core/src/main/java/org/elasticsearch/index/search/MatchQuery.java +++ b/core/src/main/java/org/elasticsearch/index/search/MatchQuery.java @@ -25,12 +25,16 @@ import org.apache.lucene.queries.ExtendedCommonTermsQuery; import org.apache.lucene.search.*; import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.util.QueryBuilder; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.lucene.search.MultiPhrasePrefixQuery; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.query.QueryParseContext; +import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.support.QueryParsers; import java.io.IOException; @@ -38,18 +42,92 @@ import java.util.List; public class MatchQuery { - public static enum Type { - BOOLEAN, - PHRASE, - PHRASE_PREFIX + public static enum Type implements Writeable { + /** + * The text is analyzed and terms are added to a boolean query. + */ + BOOLEAN(0), + /** + * The text is analyzed and used as a phrase query. + */ + PHRASE(1), + /** + * The text is analyzed and used in a phrase query, with the last term acting as a prefix. 
+ */ + PHRASE_PREFIX(2); + + private final int ordinal; + + private static final Type PROTOTYPE = BOOLEAN; + + private Type(int ordinal) { + this.ordinal = ordinal; + } + + @Override + public Type readFrom(StreamInput in) throws IOException { + int ord = in.readVInt(); + for (Type type : Type.values()) { + if (type.ordinal == ord) { + return type; + } + } + throw new ElasticsearchException("unknown serialized type [" + ord + "]"); + } + + public static Type readTypeFrom(StreamInput in) throws IOException { + return PROTOTYPE.readFrom(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeVInt(this.ordinal); + } } - public static enum ZeroTermsQuery { - NONE, - ALL + public static enum ZeroTermsQuery implements Writeable { + NONE(0), + ALL(1); + + private final int ordinal; + + private static final ZeroTermsQuery PROTOTYPE = NONE; + + private ZeroTermsQuery(int ordinal) { + this.ordinal = ordinal; + } + + @Override + public ZeroTermsQuery readFrom(StreamInput in) throws IOException { + int ord = in.readVInt(); + for (ZeroTermsQuery zeroTermsQuery : ZeroTermsQuery.values()) { + if (zeroTermsQuery.ordinal == ord) { + return zeroTermsQuery; + } + } + throw new ElasticsearchException("unknown serialized type [" + ord + "]"); + } + + public static ZeroTermsQuery readZeroTermsQueryFrom(StreamInput in) throws IOException { + return PROTOTYPE.readFrom(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeVInt(this.ordinal); + } } - protected final QueryParseContext parseContext; + /** the default phrase slop */ + public static final int DEFAULT_PHRASE_SLOP = 0; + + /** the default leniency setting */ + public static final boolean DEFAULT_LENIENCY = false; + + /** the default zero terms query */ + public static final ZeroTermsQuery DEFAULT_ZERO_TERMS_QUERY = ZeroTermsQuery.NONE; + + protected final QueryShardContext context; protected String analyzer; @@ -57,26 +135,26 @@ public class MatchQuery 
{ protected boolean enablePositionIncrements = true; - protected int phraseSlop = 0; + protected int phraseSlop = DEFAULT_PHRASE_SLOP; protected Fuzziness fuzziness = null; - + protected int fuzzyPrefixLength = FuzzyQuery.defaultPrefixLength; - + protected int maxExpansions = FuzzyQuery.defaultMaxExpansions; protected boolean transpositions = FuzzyQuery.defaultTranspositions; protected MultiTermQuery.RewriteMethod fuzzyRewriteMethod; - protected boolean lenient; + protected boolean lenient = DEFAULT_LENIENCY; + + protected ZeroTermsQuery zeroTermsQuery = DEFAULT_ZERO_TERMS_QUERY; - protected ZeroTermsQuery zeroTermsQuery = ZeroTermsQuery.NONE; - protected Float commonTermsCutoff = null; - - public MatchQuery(QueryParseContext parseContext) { - this.parseContext = parseContext; + + public MatchQuery(QueryShardContext context) { + this.context = context; } public void setAnalyzer(String analyzer) { @@ -86,9 +164,9 @@ public class MatchQuery { public void setOccur(BooleanClause.Occur occur) { this.occur = occur; } - - public void setCommonTermsCutoff(float cutoff) { - this.commonTermsCutoff = Float.valueOf(cutoff); + + public void setCommonTermsCutoff(Float cutoff) { + this.commonTermsCutoff = cutoff; } public void setEnablePositionIncrements(boolean enablePositionIncrements) { @@ -134,11 +212,11 @@ public class MatchQuery { protected Analyzer getAnalyzer(MappedFieldType fieldType) { if (this.analyzer == null) { if (fieldType != null) { - return parseContext.getSearchAnalyzer(fieldType); + return context.getSearchAnalyzer(fieldType); } - return parseContext.mapperService().searchAnalyzer(); + return context.mapperService().searchAnalyzer(); } else { - Analyzer analyzer = parseContext.mapperService().analysisService().analyzer(this.analyzer); + Analyzer analyzer = context.mapperService().analysisService().analyzer(this.analyzer); if (analyzer == null) { throw new IllegalArgumentException("No analyzer found for [" + this.analyzer + "]"); } @@ -148,7 +226,7 @@ public 
class MatchQuery { public Query parse(Type type, String fieldName, Object value) throws IOException { final String field; - MappedFieldType fieldType = parseContext.fieldMapper(fieldName); + MappedFieldType fieldType = context.fieldMapper(fieldName); if (fieldType != null) { field = fieldType.names().indexName(); } else { @@ -157,14 +235,14 @@ public class MatchQuery { if (fieldType != null && fieldType.useTermQueryWithQueryString() && !forceAnalyzeQueryString()) { try { - return fieldType.termQuery(value, parseContext); + return fieldType.termQuery(value, context); } catch (RuntimeException e) { if (lenient) { return null; } throw e; } - + } Analyzer analyzer = getAnalyzer(fieldType); assert analyzer != null; @@ -198,7 +276,7 @@ public class MatchQuery { } protected Query zeroTermsQuery() { - return zeroTermsQuery == ZeroTermsQuery.NONE ? Queries.newMatchNoDocsQuery() : Queries.newMatchAllQuery(); + return zeroTermsQuery == DEFAULT_ZERO_TERMS_QUERY ? Queries.newMatchNoDocsQuery() : Queries.newMatchAllQuery(); } private class MatchQueryBuilder extends QueryBuilder { diff --git a/core/src/main/java/org/elasticsearch/index/search/MultiMatchQuery.java b/core/src/main/java/org/elasticsearch/index/search/MultiMatchQuery.java index 08cc55f3bd8..5fb2db0aa10 100644 --- a/core/src/main/java/org/elasticsearch/index/search/MultiMatchQuery.java +++ b/core/src/main/java/org/elasticsearch/index/search/MultiMatchQuery.java @@ -31,7 +31,7 @@ import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.query.MultiMatchQueryBuilder; -import org.elasticsearch.index.query.QueryParseContext; +import org.elasticsearch.index.query.QueryShardContext; import java.io.IOException; import java.util.ArrayList; @@ -47,10 +47,10 @@ public class MultiMatchQuery extends MatchQuery { this.groupTieBreaker = tieBreaker; } - public MultiMatchQuery(QueryParseContext 
parseContext) { - super(parseContext); + public MultiMatchQuery(QueryShardContext context) { + super(context); } - + private Query parseAndApply(Type type, String fieldName, Object value, String minimumShouldMatch, Float boostValue) throws IOException { Query query = parse(type, fieldName, value); if (query instanceof BooleanQuery) { @@ -162,7 +162,7 @@ public class MultiMatchQuery extends MatchQuery { List> missing = new ArrayList<>(); for (Map.Entry entry : fieldNames.entrySet()) { String name = entry.getKey(); - MappedFieldType fieldType = parseContext.fieldMapper(name); + MappedFieldType fieldType = context.fieldMapper(name); if (fieldType != null) { Analyzer actualAnalyzer = getAnalyzer(fieldType); name = fieldType.names().indexName(); diff --git a/core/src/main/java/org/elasticsearch/index/search/morelikethis/MoreLikeThisFetchService.java b/core/src/main/java/org/elasticsearch/index/search/morelikethis/MoreLikeThisFetchService.java deleted file mode 100644 index 49643aaafa6..00000000000 --- a/core/src/main/java/org/elasticsearch/index/search/morelikethis/MoreLikeThisFetchService.java +++ /dev/null @@ -1,100 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.search.morelikethis; - -import org.apache.lucene.index.Fields; -import org.elasticsearch.action.termvectors.MultiTermVectorsItemResponse; -import org.elasticsearch.action.termvectors.MultiTermVectorsRequest; -import org.elasticsearch.action.termvectors.MultiTermVectorsResponse; -import org.elasticsearch.action.termvectors.TermVectorsResponse; -import org.elasticsearch.client.Client; -import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.query.MoreLikeThisQueryBuilder.Item; -import org.elasticsearch.search.internal.SearchContext; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.HashSet; -import java.util.List; -import java.util.Set; - -/** - * - */ -public class MoreLikeThisFetchService extends AbstractComponent { - - private final Client client; - - @Inject - public MoreLikeThisFetchService(Client client, Settings settings) { - super(settings); - this.client = client; - } - - public Fields[] fetch(List items) throws IOException { - return getFieldsFor(fetchResponse(items, null, SearchContext.current()), items); - } - - public MultiTermVectorsResponse fetchResponse(List likeItems, @Nullable List unlikeItems, - SearchContext searchContext) throws IOException { - MultiTermVectorsRequest request = new MultiTermVectorsRequest(); - for (Item item : likeItems) { - request.add(item.toTermVectorsRequest()); - } - if (unlikeItems != null) { - for (Item item : unlikeItems) { - request.add(item.toTermVectorsRequest()); - } - } - request.copyContextAndHeadersFrom(searchContext); - return client.multiTermVectors(request).actionGet(); - } - - public static Fields[] getFieldsFor(MultiTermVectorsResponse responses, List items) throws IOException { - List likeFields = new ArrayList<>(); - - Set selectedItems = new HashSet<>(); - for 
(Item request : items) { - selectedItems.add(new Item(request.index(), request.type(), request.id())); - } - - for (MultiTermVectorsItemResponse response : responses) { - if (!hasResponseFromRequest(response, selectedItems)) { - continue; - } - if (response.isFailed()) { - continue; - } - TermVectorsResponse getResponse = response.getResponse(); - if (!getResponse.isExists()) { - continue; - } - likeFields.add(getResponse.getFields()); - } - return likeFields.toArray(Fields.EMPTY_ARRAY); - } - - private static boolean hasResponseFromRequest(MultiTermVectorsItemResponse response, Set selectedItems) { - return selectedItems.contains(new Item(response.getIndex(), response.getType(), response.getId())); - } -} diff --git a/core/src/main/java/org/elasticsearch/index/search/shape/ShapeFetchService.java b/core/src/main/java/org/elasticsearch/index/search/shape/ShapeFetchService.java deleted file mode 100644 index 97d08045a65..00000000000 --- a/core/src/main/java/org/elasticsearch/index/search/shape/ShapeFetchService.java +++ /dev/null @@ -1,91 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.search.shape; - -import org.elasticsearch.action.get.GetRequest; -import org.elasticsearch.action.get.GetResponse; -import org.elasticsearch.client.Client; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.geo.builders.ShapeBuilder; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.common.xcontent.XContentParser; - -import java.io.IOException; - -/** - * Service which retrieves pre-indexed Shapes from another index - */ -public class ShapeFetchService extends AbstractComponent { - - private final Client client; - - @Inject - public ShapeFetchService(Client client, Settings settings) { - super(settings); - this.client = client; - } - - /** - * Fetches the Shape with the given ID in the given type and index. - * - * @param getRequest GetRequest containing index, type and id - * @param path Name or path of the field in the Shape Document where the Shape itself is located - * @return Shape with the given ID - * @throws IOException Can be thrown while parsing the Shape Document and extracting the Shape - */ - public ShapeBuilder fetch(GetRequest getRequest,String path) throws IOException { - getRequest.preference("_local"); - getRequest.operationThreaded(false); - GetResponse response = client.get(getRequest).actionGet(); - if (!response.isExists()) { - throw new IllegalArgumentException("Shape with ID [" + getRequest.id() + "] in type [" + getRequest.type() + "] not found"); - } - - String[] pathElements = Strings.splitStringToArray(path, '.'); - int currentPathSlot = 0; - - XContentParser parser = null; - try { - parser = XContentHelper.createParser(response.getSourceAsBytesRef()); - XContentParser.Token currentToken; - while ((currentToken = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if 
(currentToken == XContentParser.Token.FIELD_NAME) { - if (pathElements[currentPathSlot].equals(parser.currentName())) { - parser.nextToken(); - if (++currentPathSlot == pathElements.length) { - return ShapeBuilder.parse(parser); - } - } else { - parser.nextToken(); - parser.skipChildren(); - } - } - } - throw new IllegalStateException("Shape with name [" + getRequest.id() + "] found but missing " + path + " field"); - } finally { - if (parser != null) { - parser.close(); - } - } - } -} diff --git a/core/src/main/java/org/elasticsearch/index/search/shape/ShapeModule.java b/core/src/main/java/org/elasticsearch/index/search/shape/ShapeModule.java deleted file mode 100644 index 510d04f083e..00000000000 --- a/core/src/main/java/org/elasticsearch/index/search/shape/ShapeModule.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.search.shape; - -import org.elasticsearch.common.geo.ShapesAvailability; -import org.elasticsearch.common.inject.AbstractModule; - -public class ShapeModule extends AbstractModule { - - @Override - protected void configure() { - // TODO: We could wrap this entire module in a JTS_AVAILABILITY check - if (ShapesAvailability.JTS_AVAILABLE) { - bind(ShapeFetchService.class).asEagerSingleton(); - } - } -} diff --git a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java index 8e6e143cb81..683beacfa51 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -32,6 +32,8 @@ import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.flush.FlushRequest; import org.elasticsearch.action.admin.indices.optimize.OptimizeRequest; import org.elasticsearch.action.admin.indices.upgrade.post.UpgradeRequest; +import org.elasticsearch.action.termvectors.TermVectorsRequest; +import org.elasticsearch.action.termvectors.TermVectorsResponse; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.node.DiscoveryNode; @@ -67,6 +69,7 @@ import org.elasticsearch.index.deletionpolicy.SnapshotDeletionPolicy; import org.elasticsearch.index.deletionpolicy.SnapshotIndexCommit; import org.elasticsearch.index.engine.*; import org.elasticsearch.index.fielddata.FieldDataStats; +import org.elasticsearch.index.fielddata.IndexFieldDataCache; import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.fielddata.ShardFieldData; import org.elasticsearch.index.flush.FlushStats; @@ -91,7 +94,7 @@ import org.elasticsearch.index.store.StoreFileMetaData; import org.elasticsearch.index.store.StoreStats; import org.elasticsearch.index.suggest.stats.ShardSuggestMetric; 
import org.elasticsearch.index.suggest.stats.SuggestStats; -import org.elasticsearch.index.termvectors.ShardTermVectorsService; +import org.elasticsearch.index.termvectors.TermVectorsService; import org.elasticsearch.index.translog.Translog; import org.elasticsearch.index.translog.TranslogConfig; import org.elasticsearch.index.translog.TranslogStats; @@ -117,8 +120,6 @@ import java.util.concurrent.ScheduledFuture; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; -import java.util.concurrent.locks.Lock; -import java.util.concurrent.locks.ReentrantLock; public class IndexShard extends AbstractIndexShardComponent { @@ -139,7 +140,7 @@ public class IndexShard extends AbstractIndexShardComponent { private final ShardFieldData shardFieldData; private final PercolatorQueriesRegistry percolatorQueriesRegistry; private final ShardPercolateService shardPercolateService; - private final ShardTermVectorsService termVectorsService; + private final TermVectorsService termVectorsService; private final IndexFieldDataService indexFieldDataService; private final IndexService indexService; private final ShardSuggestMetric shardSuggestMetric = new ShardSuggestMetric(); @@ -203,8 +204,8 @@ public class IndexShard extends AbstractIndexShardComponent { @Inject public IndexShard(ShardId shardId, IndexSettingsService indexSettingsService, IndicesLifecycle indicesLifecycle, Store store, StoreRecoveryService storeRecoveryService, ThreadPool threadPool, MapperService mapperService, IndexQueryParserService queryParserService, IndexCache indexCache, IndexAliasesService indexAliasesService, - IndicesQueryCache indicesQueryCache, ShardPercolateService shardPercolateService, CodecService codecService, - ShardTermVectorsService termVectorsService, IndexFieldDataService indexFieldDataService, IndexService indexService, + IndicesQueryCache indicesQueryCache, CodecService codecService, + TermVectorsService 
termVectorsService, IndexFieldDataService indexFieldDataService, IndexService indexService, @Nullable IndicesWarmer warmer, SnapshotDeletionPolicy deletionPolicy, SimilarityService similarityService, EngineFactory factory, ClusterService clusterService, ShardPath path, BigArrays bigArrays, IndexSearcherWrappingService wrappingService) { super(shardId, indexSettingsService.getSettings()); @@ -228,14 +229,14 @@ public class IndexShard extends AbstractIndexShardComponent { this.indexAliasesService = indexAliasesService; this.indexingService = new ShardIndexingService(shardId, indexSettings); this.getService = new ShardGetService(this, mapperService); - this.termVectorsService = termVectorsService.setIndexShard(this); + this.termVectorsService = termVectorsService; this.searchService = new ShardSearchStats(indexSettings); this.shardWarmerService = new ShardIndexWarmerService(shardId, indexSettings); this.indicesQueryCache = indicesQueryCache; this.shardQueryCache = new ShardRequestCache(shardId, indexSettings); this.shardFieldData = new ShardFieldData(); + this.shardPercolateService = new ShardPercolateService(shardId, indexSettings); this.percolatorQueriesRegistry = new PercolatorQueriesRegistry(shardId, indexSettings, queryParserService, indexingService, indicesLifecycle, mapperService, indexFieldDataService, shardPercolateService); - this.shardPercolateService = shardPercolateService; this.indexFieldDataService = indexFieldDataService; this.indexService = indexService; this.shardBitsetFilterCache = new ShardBitsetFilterCache(shardId, indexSettings); @@ -286,10 +287,6 @@ public class IndexShard extends AbstractIndexShardComponent { return this.getService; } - public ShardTermVectorsService termVectorsService() { - return termVectorsService; - } - public ShardSuggestMetric getSuggestMetric() { return shardSuggestMetric; } @@ -638,6 +635,10 @@ public class IndexShard extends AbstractIndexShardComponent { return segmentsStats; } + public TermVectorsResponse 
getTermVectors(TermVectorsRequest request) { + return this.termVectorsService.getTermVectors(this, request); + } + public WarmerStats warmerStats() { return shardWarmerService.stats(); } @@ -1504,6 +1505,7 @@ public class IndexShard extends AbstractIndexShardComponent { // in that situation we have an extra unexpected flush. asyncFlushRunning.compareAndSet(true, false); } else { + logger.debug("submitting async flush request"); final AbstractRunnable abstractRunnable = new AbstractRunnable() { @Override public void onFailure(Throwable t) { @@ -1520,6 +1522,7 @@ public class IndexShard extends AbstractIndexShardComponent { @Override public void onAfter() { asyncFlushRunning.compareAndSet(true, false); + maybeFlush(); // fire a flush up again if we have filled up the limits such that shouldFlush() returns true } }; threadPool.executor(ThreadPool.Names.FLUSH).execute(abstractRunnable); @@ -1529,5 +1532,4 @@ public class IndexShard extends AbstractIndexShardComponent { } return false; } - } diff --git a/core/src/main/java/org/elasticsearch/index/shard/IndexShardModule.java b/core/src/main/java/org/elasticsearch/index/shard/IndexShardModule.java index 08cf9b27ab8..cd6d3a87337 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/IndexShardModule.java +++ b/core/src/main/java/org/elasticsearch/index/shard/IndexShardModule.java @@ -27,8 +27,6 @@ import org.elasticsearch.index.engine.IndexSearcherWrapper; import org.elasticsearch.index.engine.IndexSearcherWrappingService; import org.elasticsearch.index.engine.EngineFactory; import org.elasticsearch.index.engine.InternalEngineFactory; -import org.elasticsearch.index.percolator.stats.ShardPercolateService; -import org.elasticsearch.index.termvectors.ShardTermVectorsService; /** * The {@code IndexShardModule} module is responsible for binding the correct @@ -69,8 +67,6 @@ public class IndexShardModule extends AbstractModule { bind(EngineFactory.class).to(engineFactoryImpl); 
bind(StoreRecoveryService.class).asEagerSingleton(); - bind(ShardPercolateService.class).asEagerSingleton(); - bind(ShardTermVectorsService.class).asEagerSingleton(); bind(IndexSearcherWrappingService.class).asEagerSingleton(); // this injects an empty set in IndexSearcherWrappingService, otherwise guice can't construct IndexSearcherWrappingService Multibinder multibinder diff --git a/core/src/main/java/org/elasticsearch/index/shard/ShadowIndexShard.java b/core/src/main/java/org/elasticsearch/index/shard/ShadowIndexShard.java index 502724e461c..6c45331f826 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/ShadowIndexShard.java +++ b/core/src/main/java/org/elasticsearch/index/shard/ShadowIndexShard.java @@ -40,7 +40,7 @@ import org.elasticsearch.index.query.IndexQueryParserService; import org.elasticsearch.index.settings.IndexSettingsService; import org.elasticsearch.index.similarity.SimilarityService; import org.elasticsearch.index.store.Store; -import org.elasticsearch.index.termvectors.ShardTermVectorsService; +import org.elasticsearch.index.termvectors.TermVectorsService; import org.elasticsearch.indices.IndicesLifecycle; import org.elasticsearch.indices.IndicesWarmer; import org.elasticsearch.indices.cache.query.IndicesQueryCache; @@ -62,15 +62,14 @@ public final class ShadowIndexShard extends IndexShard { ThreadPool threadPool, MapperService mapperService, IndexQueryParserService queryParserService, IndexCache indexCache, IndexAliasesService indexAliasesService, IndicesQueryCache indicesQueryCache, - ShardPercolateService shardPercolateService, CodecService codecService, - ShardTermVectorsService termVectorsService, IndexFieldDataService indexFieldDataService, + CodecService codecService, TermVectorsService termVectorsService, IndexFieldDataService indexFieldDataService, IndexService indexService, @Nullable IndicesWarmer warmer, SnapshotDeletionPolicy deletionPolicy, SimilarityService similarityService, EngineFactory factory, ClusterService 
clusterService, ShardPath path, BigArrays bigArrays, IndexSearcherWrappingService wrappingService) throws IOException { super(shardId, indexSettingsService, indicesLifecycle, store, storeRecoveryService, threadPool, mapperService, queryParserService, indexCache, indexAliasesService, - indicesQueryCache, shardPercolateService, codecService, + indicesQueryCache, codecService, termVectorsService, indexFieldDataService, indexService, warmer, deletionPolicy, similarityService, factory, clusterService, path, bigArrays, wrappingService); diff --git a/core/src/main/java/org/elasticsearch/index/termvectors/ShardTermVectorsService.java b/core/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java similarity index 86% rename from core/src/main/java/org/elasticsearch/index/termvectors/ShardTermVectorsService.java rename to core/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java index cb34c1167d5..5b27d327806 100644 --- a/core/src/main/java/org/elasticsearch/index/termvectors/ShardTermVectorsService.java +++ b/core/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java @@ -33,6 +33,7 @@ import org.elasticsearch.cluster.action.index.MappingUpdatedAction; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.lucene.uid.Versions; import org.elasticsearch.common.settings.Settings; @@ -42,10 +43,7 @@ import org.elasticsearch.index.get.GetResult; import org.elasticsearch.index.mapper.*; import org.elasticsearch.index.mapper.core.StringFieldMapper; import org.elasticsearch.index.mapper.internal.UidFieldMapper; -import org.elasticsearch.index.settings.IndexSettings; -import org.elasticsearch.index.shard.AbstractIndexShardComponent; import org.elasticsearch.index.shard.IndexShard; -import 
org.elasticsearch.index.shard.ShardId; import org.elasticsearch.search.dfs.AggregatedDfs; import java.io.IOException; @@ -56,27 +54,20 @@ import static org.elasticsearch.index.mapper.SourceToParse.source; /** */ -public class ShardTermVectorsService extends AbstractIndexShardComponent { +public class TermVectorsService { - private IndexShard indexShard; private final MappingUpdatedAction mappingUpdatedAction; private final TransportDfsOnlyAction dfsAction; @Inject - public ShardTermVectorsService(ShardId shardId, @IndexSettings Settings indexSettings, MappingUpdatedAction mappingUpdatedAction, TransportDfsOnlyAction dfsAction) { - super(shardId, indexSettings); + public TermVectorsService(MappingUpdatedAction mappingUpdatedAction, TransportDfsOnlyAction dfsAction) { this.mappingUpdatedAction = mappingUpdatedAction; this.dfsAction = dfsAction; } - // sadly, to overcome cyclic dep, we need to do this and inject it ourselves... - public ShardTermVectorsService setIndexShard(IndexShard indexShard) { - this.indexShard = indexShard; - return this; - } - public TermVectorsResponse getTermVectors(TermVectorsRequest request, String concreteIndex) { - final TermVectorsResponse termVectorsResponse = new TermVectorsResponse(concreteIndex, request.type(), request.id()); + public TermVectorsResponse getTermVectors(IndexShard indexShard, TermVectorsRequest request) { + final TermVectorsResponse termVectorsResponse = new TermVectorsResponse(indexShard.shardId().index().name(), request.type(), request.id()); final Term uidTerm = new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(request.type(), request.id())); Engine.GetResult get = indexShard.get(new Engine.Get(request.realtime(), uidTerm).version(request.version()).versionType(request.versionType())); @@ -94,7 +85,7 @@ public class ShardTermVectorsService extends AbstractIndexShardComponent { /* handle potential wildcards in fields */ if (request.selectedFields() != null) { - handleFieldWildcards(request); + 
handleFieldWildcards(indexShard, request); } final Engine.Searcher searcher = indexShard.acquireSearcher("term_vector"); @@ -103,7 +94,7 @@ public class ShardTermVectorsService extends AbstractIndexShardComponent { Versions.DocIdAndVersion docIdAndVersion = get.docIdAndVersion(); /* from an artificial document */ if (request.doc() != null) { - termVectorsByField = generateTermVectorsFromDoc(request, !docFromTranslog); + termVectorsByField = generateTermVectorsFromDoc(indexShard, request, !docFromTranslog); // if no document indexed in shard, take the queried document itself for stats if (topLevelFields == null) { topLevelFields = termVectorsByField; @@ -122,7 +113,7 @@ public class ShardTermVectorsService extends AbstractIndexShardComponent { } // fields without term vectors if (selectedFields != null) { - termVectorsByField = addGeneratedTermVectors(get, termVectorsByField, request, selectedFields); + termVectorsByField = addGeneratedTermVectors(indexShard, get, termVectorsByField, request, selectedFields); } termVectorsResponse.setDocVersion(docIdAndVersion.version); termVectorsResponse.setExists(true); @@ -158,7 +149,7 @@ public class ShardTermVectorsService extends AbstractIndexShardComponent { return termVectorsResponse; } - private void handleFieldWildcards(TermVectorsRequest request) { + private void handleFieldWildcards(IndexShard indexShard, TermVectorsRequest request) { Set fieldNames = new HashSet<>(); for (String pattern : request.selectedFields()) { fieldNames.addAll(indexShard.mapperService().simpleMatchToIndexNames(pattern)); @@ -178,7 +169,7 @@ public class ShardTermVectorsService extends AbstractIndexShardComponent { return true; } - private Fields addGeneratedTermVectors(Engine.GetResult get, Fields termVectorsByField, TermVectorsRequest request, Set selectedFields) throws IOException { + private Fields addGeneratedTermVectors(IndexShard indexShard, Engine.GetResult get, Fields termVectorsByField, TermVectorsRequest request, Set selectedFields) 
throws IOException { /* only keep valid fields */ Set validFields = new HashSet<>(); for (String field : selectedFields) { @@ -201,7 +192,7 @@ public class ShardTermVectorsService extends AbstractIndexShardComponent { /* generate term vectors from fetched document fields */ GetResult getResult = indexShard.getService().get( get, request.id(), request.type(), validFields.toArray(Strings.EMPTY_ARRAY), null, false); - Fields generatedTermVectors = generateTermVectors(getResult.getFields().values(), request.offsets(), request.perFieldAnalyzer(), validFields); + Fields generatedTermVectors = generateTermVectors(indexShard, getResult.getFields().values(), request.offsets(), request.perFieldAnalyzer(), validFields); /* merge with existing Fields */ if (termVectorsByField == null) { @@ -211,7 +202,7 @@ public class ShardTermVectorsService extends AbstractIndexShardComponent { } } - private Analyzer getAnalyzerAtField(String field, @Nullable Map perFieldAnalyzer) { + private Analyzer getAnalyzerAtField(IndexShard indexShard, String field, @Nullable Map perFieldAnalyzer) { MapperService mapperService = indexShard.mapperService(); Analyzer analyzer; if (perFieldAnalyzer != null && perFieldAnalyzer.containsKey(field)) { @@ -235,7 +226,7 @@ public class ShardTermVectorsService extends AbstractIndexShardComponent { return selectedFields; } - private Fields generateTermVectors(Collection getFields, boolean withOffsets, @Nullable Map perFieldAnalyzer, Set fields) + private Fields generateTermVectors(IndexShard indexShard, Collection getFields, boolean withOffsets, @Nullable Map perFieldAnalyzer, Set fields) throws IOException { /* store document in memory index */ MemoryIndex index = new MemoryIndex(withOffsets); @@ -245,7 +236,7 @@ public class ShardTermVectorsService extends AbstractIndexShardComponent { // some fields are returned even when not asked for, eg. 
_timestamp continue; } - Analyzer analyzer = getAnalyzerAtField(field, perFieldAnalyzer); + Analyzer analyzer = getAnalyzerAtField(indexShard, field, perFieldAnalyzer); for (Object text : getField.getValues()) { index.addField(field, text.toString(), analyzer); } @@ -254,9 +245,9 @@ public class ShardTermVectorsService extends AbstractIndexShardComponent { return MultiFields.getFields(index.createSearcher().getIndexReader()); } - private Fields generateTermVectorsFromDoc(TermVectorsRequest request, boolean doAllFields) throws Throwable { + private Fields generateTermVectorsFromDoc(IndexShard indexShard, TermVectorsRequest request, boolean doAllFields) throws Throwable { // parse the document, at the moment we do update the mapping, just like percolate - ParsedDocument parsedDocument = parseDocument(indexShard.shardId().getIndex(), request.type(), request.doc()); + ParsedDocument parsedDocument = parseDocument(indexShard, indexShard.shardId().getIndex(), request.type(), request.doc()); // select the right fields and generate term vectors ParseContext.Document doc = parsedDocument.rootDoc(); @@ -282,10 +273,10 @@ public class ShardTermVectorsService extends AbstractIndexShardComponent { String[] values = doc.getValues(field.name()); getFields.add(new GetField(field.name(), Arrays.asList((Object[]) values))); } - return generateTermVectors(getFields, request.offsets(), request.perFieldAnalyzer(), seenFields); + return generateTermVectors(indexShard, getFields, request.offsets(), request.perFieldAnalyzer(), seenFields); } - private ParsedDocument parseDocument(String index, String type, BytesReference doc) throws Throwable { + private ParsedDocument parseDocument(IndexShard indexShard, String index, String type, BytesReference doc) throws Throwable { MapperService mapperService = indexShard.mapperService(); // TODO: make parsing not dynamically create fields not in the original mapping diff --git a/core/src/main/java/org/elasticsearch/indices/IndicesModule.java 
b/core/src/main/java/org/elasticsearch/indices/IndicesModule.java index 32c7bc833b0..ff9bd334d9f 100644 --- a/core/src/main/java/org/elasticsearch/indices/IndicesModule.java +++ b/core/src/main/java/org/elasticsearch/indices/IndicesModule.java @@ -29,6 +29,7 @@ import org.elasticsearch.common.util.ExtensionPoint; import org.elasticsearch.index.query.*; import org.elasticsearch.index.query.functionscore.FunctionScoreQueryParser; import org.elasticsearch.index.query.MoreLikeThisQueryParser; +import org.elasticsearch.index.termvectors.TermVectorsService; import org.elasticsearch.indices.analysis.HunspellService; import org.elasticsearch.indices.analysis.IndicesAnalysisService; import org.elasticsearch.indices.cache.query.IndicesQueryCache; @@ -110,6 +111,7 @@ public class IndicesModule extends AbstractModule { registerQueryParser(NotQueryParser.class); registerQueryParser(ExistsQueryParser.class); registerQueryParser(MissingQueryParser.class); + registerQueryParser(MatchNoneQueryParser.class); if (ShapesAvailability.JTS_AVAILABLE) { registerQueryParser(GeoShapeQueryParser.class); @@ -147,6 +149,7 @@ public class IndicesModule extends AbstractModule { bind(UpdateHelper.class).asEagerSingleton(); bind(MetaDataIndexUpgradeService.class).asEagerSingleton(); bind(IndicesFieldDataCacheListener.class).asEagerSingleton(); + bind(TermVectorsService.class).asEagerSingleton(); } protected void bindQueryParsersExtension() { diff --git a/core/src/main/java/org/elasticsearch/indices/cache/query/terms/TermsLookup.java b/core/src/main/java/org/elasticsearch/indices/cache/query/terms/TermsLookup.java index 28ab04bd245..3e4d6bf2c0b 100644 --- a/core/src/main/java/org/elasticsearch/indices/cache/query/terms/TermsLookup.java +++ b/core/src/main/java/org/elasticsearch/indices/cache/query/terms/TermsLookup.java @@ -19,58 +19,173 @@ package org.elasticsearch.indices.cache.query.terms; -import org.elasticsearch.common.Nullable; -import org.elasticsearch.index.query.QueryParseContext; +import 
org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.util.Objects; /** + * Encapsulates the parameters needed to fetch terms. */ -public class TermsLookup { +public class TermsLookup implements Writeable, ToXContent { + static final TermsLookup PROTOTYPE = new TermsLookup("index", "type", "id", "path"); - private final String index; + private String index; private final String type; private final String id; - private final String routing; private final String path; + private String routing; - @Nullable - private final QueryParseContext queryParseContext; - - public TermsLookup(String index, String type, String id, String routing, String path, @Nullable QueryParseContext queryParseContext) { + public TermsLookup(String index, String type, String id, String path) { + if (id == null) { + throw new IllegalArgumentException("[terms] query lookup element requires specifying the id."); + } + if (type == null) { + throw new IllegalArgumentException("[terms] query lookup element requires specifying the type."); + } + if (path == null) { + throw new IllegalArgumentException("[terms] query lookup element requires specifying the path."); + } this.index = index; this.type = type; this.id = id; - this.routing = routing; this.path = path; - this.queryParseContext = queryParseContext; } - public String getIndex() { + public String index() { return index; } - public String getType() { + public TermsLookup index(String index) { + this.index = index; + return this; + } + + public String type() { return type; } - public String getId() { + public String id() { return id; } - public String getRouting() { - return 
this.routing; - } - - public String getPath() { + public String path() { return path; } - @Nullable - public QueryParseContext getQueryParseContext() { - return queryParseContext; + public String routing() { + return routing; + } + + public TermsLookup routing(String routing) { + this.routing = routing; + return this; + } + + public static TermsLookup parseTermsLookup(XContentParser parser) throws IOException { + String index = null; + String type = null; + String id = null; + String path = null; + String routing = null; + XContentParser.Token token; + String currentFieldName = ""; + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + currentFieldName = parser.currentName(); + } else if (token.isValue()) { + switch (currentFieldName) { + case "index": + index = parser.textOrNull(); + break; + case "type": + type = parser.text(); + break; + case "id": + id = parser.text(); + break; + case "routing": + routing = parser.textOrNull(); + break; + case "path": + path = parser.text(); + break; + default: + throw new ParsingException(parser.getTokenLocation(), "[terms] query does not support [" + currentFieldName + + "] within lookup element"); + } + } + } + return new TermsLookup(index, type, id, path).routing(routing); } @Override public String toString() { return index + "/" + type + "/" + id + "/" + path; } + + @Override + public TermsLookup readFrom(StreamInput in) throws IOException { + String type = in.readString(); + String id = in.readString(); + String path = in.readString(); + String index = in.readOptionalString(); + TermsLookup termsLookup = new TermsLookup(index, type, id, path); + termsLookup.routing = in.readOptionalString(); + return termsLookup; + } + + public static TermsLookup readTermsLookupFrom(StreamInput in) throws IOException { + return PROTOTYPE.readFrom(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(type); + 
out.writeString(id); + out.writeString(path); + out.writeOptionalString(index); + out.writeOptionalString(routing); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + if (index != null) { + builder.field("index", index); + } + builder.field("type", type); + builder.field("id", id); + builder.field("path", path); + if (routing != null) { + builder.field("routing", routing); + } + return builder; + } + + @Override + public int hashCode() { + return Objects.hash(index, type, id, path, routing); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + TermsLookup other = (TermsLookup) obj; + return Objects.equals(index, other.index) && + Objects.equals(type, other.type) && + Objects.equals(id, other.id) && + Objects.equals(path, other.path) && + Objects.equals(routing, other.routing); + } } diff --git a/core/src/main/java/org/elasticsearch/indices/memory/IndexingMemoryController.java b/core/src/main/java/org/elasticsearch/indices/memory/IndexingMemoryController.java index 2bf6946bc32..f54020802b5 100644 --- a/core/src/main/java/org/elasticsearch/indices/memory/IndexingMemoryController.java +++ b/core/src/main/java/org/elasticsearch/indices/memory/IndexingMemoryController.java @@ -38,12 +38,7 @@ import org.elasticsearch.indices.IndicesService; import org.elasticsearch.monitor.jvm.JvmInfo; import org.elasticsearch.threadpool.ThreadPool; -import java.util.ArrayList; -import java.util.EnumSet; -import java.util.HashMap; -import java.util.Iterator; -import java.util.List; -import java.util.Map; +import java.util.*; import java.util.concurrent.ScheduledFuture; /** @@ -258,7 +253,7 @@ public class IndexingMemoryController extends AbstractLifecycleComponent queryParsers; + private ImmutableMap> queryParsers; @Inject - public IndicesQueriesRegistry(Settings settings, Set injectedQueryParsers) { + 
public IndicesQueriesRegistry(Settings settings, Set injectedQueryParsers, NamedWriteableRegistry namedWriteableRegistry) { super(settings); - Map queryParsers = new HashMap<>(); - for (QueryParser queryParser : injectedQueryParsers) { + Map> queryParsers = new HashMap<>(); + for (QueryParser queryParser : injectedQueryParsers) { for (String name : queryParser.names()) { queryParsers.put(name, queryParser); } + namedWriteableRegistry.registerPrototype(QueryBuilder.class, queryParser.getBuilderPrototype()); } + // EmptyQueryBuilder is not registered as query parser but used internally. + // We need to register it with the NamedWriteableRegistry in order to serialize it + namedWriteableRegistry.registerPrototype(QueryBuilder.class, EmptyQueryBuilder.PROTOTYPE); this.queryParsers = ImmutableMap.copyOf(queryParsers); } /** * Returns all the registered query parsers */ - public ImmutableMap queryParsers() { + public ImmutableMap> queryParsers() { return queryParsers; } } \ No newline at end of file diff --git a/core/src/main/java/org/elasticsearch/node/Node.java b/core/src/main/java/org/elasticsearch/node/Node.java index 6d5ebc045fe..ecd7fddd3be 100644 --- a/core/src/main/java/org/elasticsearch/node/Node.java +++ b/core/src/main/java/org/elasticsearch/node/Node.java @@ -31,7 +31,6 @@ import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.action.index.MappingUpdatedAction; import org.elasticsearch.cluster.routing.RoutingService; import org.elasticsearch.common.StopWatch; -import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.component.Lifecycle; import org.elasticsearch.common.component.LifecycleComponent; import org.elasticsearch.common.inject.Injector; @@ -56,7 +55,6 @@ import org.elasticsearch.gateway.GatewayModule; import org.elasticsearch.gateway.GatewayService; import org.elasticsearch.http.HttpServer; import org.elasticsearch.http.HttpServerModule; -import org.elasticsearch.index.search.shape.ShapeModule; import 
org.elasticsearch.indices.IndicesModule; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.breaker.CircuitBreakerModule; @@ -189,7 +187,6 @@ public class Node implements Releasable { modules.add(new MonitorModule(settings)); modules.add(new GatewayModule(settings)); modules.add(new NodeClientModule()); - modules.add(new ShapeModule()); modules.add(new PercolatorModule()); modules.add(new ResourceWatcherModule()); modules.add(new RepositoriesModule()); diff --git a/core/src/main/java/org/elasticsearch/percolator/PercolatorService.java b/core/src/main/java/org/elasticsearch/percolator/PercolatorService.java index e8212e931bd..ba4ccaeb25e 100644 --- a/core/src/main/java/org/elasticsearch/percolator/PercolatorService.java +++ b/core/src/main/java/org/elasticsearch/percolator/PercolatorService.java @@ -65,11 +65,9 @@ import org.elasticsearch.index.IndexService; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.SortedBinaryDocValues; -import org.elasticsearch.index.mapper.*; import org.elasticsearch.index.mapper.DocumentMapperForType; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.mapper.Mapping; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.Uid; import org.elasticsearch.index.mapper.internal.UidFieldMapper; diff --git a/core/src/main/java/org/elasticsearch/rest/action/explain/RestExplainAction.java b/core/src/main/java/org/elasticsearch/rest/action/explain/RestExplainAction.java index ce306c6563d..7c01fddf3cc 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/explain/RestExplainAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/explain/RestExplainAction.java @@ -30,6 +30,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; 
import org.elasticsearch.common.xcontent.XContentBuilderString; import org.elasticsearch.index.get.GetResult; +import org.elasticsearch.index.query.Operator; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.QueryStringQueryBuilder; import org.elasticsearch.rest.*; @@ -74,13 +75,7 @@ public class RestExplainAction extends BaseRestHandler { queryStringBuilder.lenient(request.paramAsBoolean("lenient", null)); String defaultOperator = request.param("default_operator"); if (defaultOperator != null) { - if ("OR".equals(defaultOperator)) { - queryStringBuilder.defaultOperator(QueryStringQueryBuilder.Operator.OR); - } else if ("AND".equals(defaultOperator)) { - queryStringBuilder.defaultOperator(QueryStringQueryBuilder.Operator.AND); - } else { - throw new IllegalArgumentException("Unsupported defaultOperator [" + defaultOperator + "], can either be [OR] or [AND]"); - } + queryStringBuilder.defaultOperator(Operator.fromString(defaultOperator)); } QuerySourceBuilder querySourceBuilder = new QuerySourceBuilder(); diff --git a/core/src/main/java/org/elasticsearch/rest/action/support/RestActions.java b/core/src/main/java/org/elasticsearch/rest/action/support/RestActions.java index bd17c1d5944..674aa6902b0 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/support/RestActions.java +++ b/core/src/main/java/org/elasticsearch/rest/action/support/RestActions.java @@ -27,6 +27,7 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.lucene.uid.Versions; import org.elasticsearch.common.xcontent.*; +import org.elasticsearch.index.query.Operator; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.QueryStringQueryBuilder; import org.elasticsearch.rest.RestRequest; @@ -97,13 +98,7 @@ public class RestActions { queryBuilder.lenient(request.paramAsBoolean("lenient", null)); String defaultOperator = 
request.param("default_operator"); if (defaultOperator != null) { - if ("OR".equals(defaultOperator)) { - queryBuilder.defaultOperator(QueryStringQueryBuilder.Operator.OR); - } else if ("AND".equals(defaultOperator)) { - queryBuilder.defaultOperator(QueryStringQueryBuilder.Operator.AND); - } else { - throw new IllegalArgumentException("Unsupported defaultOperator [" + defaultOperator + "], can either be [OR] or [AND]"); - } + queryBuilder.defaultOperator(Operator.fromString(defaultOperator)); } return new QuerySourceBuilder().setQuery(queryBuilder); } diff --git a/core/src/main/java/org/elasticsearch/script/expression/ExpressionScriptCompilationException.java b/core/src/main/java/org/elasticsearch/script/expression/ExpressionScriptCompilationException.java deleted file mode 100644 index 710fb20a0be..00000000000 --- a/core/src/main/java/org/elasticsearch/script/expression/ExpressionScriptCompilationException.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.script.expression; - -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.common.io.stream.StreamInput; - -import java.io.IOException; -import java.text.ParseException; - -/** - * Exception representing a compilation error in an expression. - * @deprecated not used anymore. but people dont seem to want it cleaned up. enjoy your pile of shitty exceptions, clean it up yourself. - */ -@Deprecated -public class ExpressionScriptCompilationException extends ElasticsearchException { - public ExpressionScriptCompilationException(String msg, ParseException e) { - super(msg, e); - } - public ExpressionScriptCompilationException(String msg) { - super(msg); - } - - public ExpressionScriptCompilationException(StreamInput in) throws IOException { - super(in); - } -} diff --git a/core/src/main/java/org/elasticsearch/script/expression/ExpressionScriptExecutionException.java b/core/src/main/java/org/elasticsearch/script/expression/ExpressionScriptExecutionException.java deleted file mode 100644 index 435b4b41d8b..00000000000 --- a/core/src/main/java/org/elasticsearch/script/expression/ExpressionScriptExecutionException.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.script.expression; - -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.common.io.stream.StreamInput; - -import java.io.IOException; - -/** - * Exception used to wrap exceptions occuring while running expressions. - * @deprecated not used anymore. but people dont seem to want it cleaned up. enjoy your pile of shitty exceptions, clean it up yourself. - */ -@Deprecated -public class ExpressionScriptExecutionException extends ElasticsearchException { - public ExpressionScriptExecutionException(String msg, Throwable cause) { - super(msg, cause); - } - - public ExpressionScriptExecutionException(StreamInput in) throws IOException { - super(in); - } - - public ExpressionScriptExecutionException(String msg) { - super(msg); - } -} diff --git a/core/src/main/java/org/elasticsearch/search/MultiValueMode.java b/core/src/main/java/org/elasticsearch/search/MultiValueMode.java index 32a341e308e..aec5cd18206 100644 --- a/core/src/main/java/org/elasticsearch/search/MultiValueMode.java +++ b/core/src/main/java/org/elasticsearch/search/MultiValueMode.java @@ -23,10 +23,13 @@ package org.elasticsearch.search; import org.apache.lucene.index.*; import org.apache.lucene.search.DocIdSet; import org.apache.lucene.search.DocIdSetIterator; -import org.apache.lucene.util.Bits; import org.apache.lucene.util.BitSet; +import org.apache.lucene.util.Bits; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.index.fielddata.FieldData; import org.elasticsearch.index.fielddata.NumericDoubleValues; import org.elasticsearch.index.fielddata.SortedBinaryDocValues; @@ -38,7 +41,7 @@ import java.util.Locale; /** * 
Defines what values to pick in the case a document contains multiple values for a particular field. */ -public enum MultiValueMode { +public enum MultiValueMode implements Writeable { /** * Pick the sum of all the values. @@ -941,4 +944,22 @@ public enum MultiValueMode { void setDocument(int docId); double valueAt(int index); } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeVInt(this.ordinal()); + } + + public static MultiValueMode readMultiValueModeFrom(StreamInput in) throws IOException { + return MultiValueMode.AVG.readFrom(in); + } + + @Override + public MultiValueMode readFrom(StreamInput in) throws IOException { + int ordinal = in.readVInt(); + if (ordinal < 0 || ordinal >= values().length) { + throw new IOException("Unknown MultiValueMode ordinal [" + ordinal + "]"); + } + return values()[ordinal]; + } } diff --git a/core/src/main/java/org/elasticsearch/search/SearchModule.java b/core/src/main/java/org/elasticsearch/search/SearchModule.java index 419f6ec40e4..7959eaa0a9a 100644 --- a/core/src/main/java/org/elasticsearch/search/SearchModule.java +++ b/core/src/main/java/org/elasticsearch/search/SearchModule.java @@ -24,7 +24,6 @@ import org.elasticsearch.common.inject.multibindings.Multibinder; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.query.functionscore.ScoreFunctionParser; import org.elasticsearch.index.query.functionscore.ScoreFunctionParserMapper; -import org.elasticsearch.index.search.morelikethis.MoreLikeThisFetchService; import org.elasticsearch.search.action.SearchServiceTransportAction; import org.elasticsearch.search.aggregations.AggregationParseElement; import org.elasticsearch.search.aggregations.AggregationPhase; @@ -67,11 +66,7 @@ import org.elasticsearch.search.aggregations.bucket.significant.UnmappedSignific import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicParser; import 
org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicParserMapper; import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicStreams; -import org.elasticsearch.search.aggregations.bucket.terms.DoubleTerms; -import org.elasticsearch.search.aggregations.bucket.terms.LongTerms; -import org.elasticsearch.search.aggregations.bucket.terms.StringTerms; -import org.elasticsearch.search.aggregations.bucket.terms.TermsParser; -import org.elasticsearch.search.aggregations.bucket.terms.UnmappedTerms; +import org.elasticsearch.search.aggregations.bucket.terms.*; import org.elasticsearch.search.aggregations.metrics.avg.AvgParser; import org.elasticsearch.search.aggregations.metrics.avg.InternalAvg; import org.elasticsearch.search.aggregations.metrics.cardinality.CardinalityParser; @@ -151,7 +146,8 @@ import org.elasticsearch.search.query.QueryPhase; import org.elasticsearch.search.suggest.Suggester; import org.elasticsearch.search.suggest.Suggesters; -import java.util.*; +import java.util.HashSet; +import java.util.Set; /** * @@ -255,7 +251,7 @@ public class SearchModule extends AbstractModule { for (Class clazz : functionScoreParsers) { parserMapBinder.addBinding().to(clazz); } - bind(ScoreFunctionParserMapper.class); + bind(ScoreFunctionParserMapper.class).asEagerSingleton(); } protected void configureHighlighters() { @@ -339,8 +335,6 @@ public class SearchModule extends AbstractModule { bind(SearchPhaseController.class).asEagerSingleton(); bind(FetchPhase.class).asEagerSingleton(); bind(SearchServiceTransportAction.class).asEagerSingleton(); - bind(MoreLikeThisFetchService.class).asEagerSingleton(); - if (searchServiceImpl == SearchService.class) { bind(SearchService.class).asEagerSingleton(); } else { diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/GND.java 
b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/GND.java index 70af2f0fb4f..99ee7c73b2b 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/GND.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/GND.java @@ -28,7 +28,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.ParsingException; +import org.elasticsearch.index.query.QueryShardException; import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; @@ -117,7 +117,7 @@ public class GND extends NXYSignificanceHeuristic { @Override public SignificanceHeuristic parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher, SearchContext context) - throws IOException, ParsingException { + throws IOException, QueryShardException { String givenName = parser.currentName(); boolean backgroundIsSuperset = true; XContentParser.Token token = parser.nextToken(); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/JLHScore.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/JLHScore.java index 87dce6f68e4..97264e7d53f 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/JLHScore.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/JLHScore.java @@ -27,7 +27,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.ParsingException; +import 
org.elasticsearch.index.query.QueryShardException; import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; @@ -110,7 +110,7 @@ public class JLHScore extends SignificanceHeuristic { @Override public SignificanceHeuristic parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher, SearchContext context) - throws IOException, ParsingException { + throws IOException, QueryShardException { // move to the closing bracket if (!parser.nextToken().equals(XContentParser.Token.END_OBJECT)) { throw new ElasticsearchParseException("failed to parse [jhl] significance heuristic. expected an empty object, but found [{}] instead", parser.currentToken()); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/NXYSignificanceHeuristic.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/NXYSignificanceHeuristic.java index e6dcb315e33..c6a6924108c 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/NXYSignificanceHeuristic.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/NXYSignificanceHeuristic.java @@ -27,7 +27,7 @@ import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.ParsingException; +import org.elasticsearch.index.query.QueryShardException; import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; @@ -140,7 +140,7 @@ public abstract class NXYSignificanceHeuristic extends SignificanceHeuristic { @Override public SignificanceHeuristic parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher, SearchContext context) - throws IOException, ParsingException { + throws IOException, QueryShardException { String givenName = parser.currentName(); 
boolean includeNegatives = false; boolean backgroundIsSuperset = true; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/PercentageScore.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/PercentageScore.java index 4086d364ecb..aceae8c251b 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/PercentageScore.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/PercentageScore.java @@ -27,7 +27,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.ParsingException; +import org.elasticsearch.index.query.QueryShardException; import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; @@ -79,7 +79,7 @@ public class PercentageScore extends SignificanceHeuristic { @Override public SignificanceHeuristic parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher, SearchContext context) - throws IOException, ParsingException { + throws IOException, QueryShardException { // move to the closing bracket if (!parser.nextToken().equals(XContentParser.Token.END_OBJECT)) { throw new ElasticsearchParseException("failed to parse [percentage] significance heuristic. 
expected an empty object, but got [{}] instead", parser.currentToken()); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/ScriptHeuristic.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/ScriptHeuristic.java index 14c0554c952..046ca717b9f 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/ScriptHeuristic.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/ScriptHeuristic.java @@ -29,14 +29,10 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.ParsingException; -import org.elasticsearch.script.ExecutableScript; -import org.elasticsearch.script.Script; +import org.elasticsearch.index.query.QueryShardException; +import org.elasticsearch.script.*; import org.elasticsearch.script.Script.ScriptField; -import org.elasticsearch.script.ScriptContext; -import org.elasticsearch.script.ScriptParameterParser; import org.elasticsearch.script.ScriptParameterParser.ScriptParameterValue; -import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.internal.SearchContext; @@ -134,7 +130,7 @@ public class ScriptHeuristic extends SignificanceHeuristic { @Override public SignificanceHeuristic parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher, SearchContext context) - throws IOException, ParsingException { + throws IOException, QueryShardException { String heuristicName = parser.currentName(); Script script = null; XContentParser.Token token; diff --git a/core/src/main/java/org/elasticsearch/search/fetch/innerhits/InnerHitsBuilder.java 
b/core/src/main/java/org/elasticsearch/search/fetch/innerhits/InnerHitsBuilder.java index 125f6355a06..a14fdfe9f8f 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/innerhits/InnerHitsBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/innerhits/InnerHitsBuilder.java @@ -19,10 +19,15 @@ package org.elasticsearch.search.fetch.innerhits; +import org.elasticsearch.common.Nullable; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.index.query.support.BaseInnerHitBuilder; +import org.elasticsearch.script.Script; +import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.search.highlight.HighlightBuilder; +import org.elasticsearch.search.sort.SortBuilder; +import org.elasticsearch.search.sort.SortOrder; import java.io.IOException; import java.util.HashMap; @@ -32,12 +37,12 @@ import java.util.Map; */ public class InnerHitsBuilder implements ToXContent { - private Map innerHits = new HashMap<>(); + private final Map innerHits = new HashMap<>(); @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject("inner_hits"); - for (Map.Entry entry : innerHits.entrySet()) { + for (Map.Entry entry : innerHits.entrySet()) { builder.startObject(entry.getKey()); entry.getValue().toXContent(builder, params); builder.endObject(); @@ -45,15 +50,408 @@ public class InnerHitsBuilder implements ToXContent { return builder.endObject(); } - public void addInnerHit(String name, InnerHit innerHit) { - innerHits.put(name, innerHit); + /** + * For nested inner hits the path to collect child nested docs for. 
+ * @param name the name / key of the inner hits in the response + * @param path the path into the nested to collect inner hits for + * @param innerHit the inner hits definition + */ + public void addNestedInnerHits(String name, String path, InnerHit innerHit) { + if (innerHits.containsKey(name)) { + throw new IllegalArgumentException("inner hits for name: [" + name +"] is already registered"); + } + innerHits.put(name, new NestedInnerHitsHolder(path, innerHit)); } - public static class InnerHit extends BaseInnerHitBuilder { + /** + * For parent/child inner hits the type to collect inner hits for. + * @param name the name / key of the inner hits in the response + * @param type the document type to collect inner hits for + * @param innerHit the inner hits definition + */ + public void addParentChildInnerHits(String name, String type, InnerHit innerHit) { + innerHits.put(name, new ParentChildInnerHitsHolder(type, innerHit)); + } + private static class InnerHitsHolder implements ToXContent{ + private final InnerHit hits; + + private InnerHitsHolder(InnerHit hits) { + this.hits = hits; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + return hits.toXContent(builder, params); + } + } + + private static class ParentChildInnerHitsHolder extends InnerHitsHolder { + + private final String type; + + private ParentChildInnerHitsHolder(String type, InnerHit hits) { + super(hits); + this.type = type; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject("type").startObject(type); + super.toXContent(builder, params); + return builder.endObject().endObject(); + } + } + + private static class NestedInnerHitsHolder extends InnerHitsHolder { + + private final String path; + + private NestedInnerHitsHolder(String path, InnerHit hits) { + super(hits); + this.path = path; + } + + @Override + public XContentBuilder 
toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject("path").startObject(path); + super.toXContent(builder, params); + return builder.endObject().endObject(); + } + } + + public static class InnerHit implements ToXContent { + + private SearchSourceBuilder sourceBuilder; private String path; private String type; + /** + * The index to start to return hits from. Defaults to 0. + */ + public InnerHit setFrom(int from) { + sourceBuilder().from(from); + return this; + } + + /** + * The number of search hits to return. Defaults to 10. + */ + public InnerHit setSize(int size) { + sourceBuilder().size(size); + return this; + } + + /** + * Applies when sorting, and controls if scores will be tracked as well. Defaults to + * false. + */ + public InnerHit setTrackScores(boolean trackScores) { + sourceBuilder().trackScores(trackScores); + return this; + } + + /** + * Should each {@link org.elasticsearch.search.SearchHit} be returned with an + * explanation of the hit (ranking). + */ + public InnerHit setExplain(boolean explain) { + sourceBuilder().explain(explain); + return this; + } + + /** + * Should each {@link org.elasticsearch.search.SearchHit} be returned with its + * version. + */ + public InnerHit setVersion(boolean version) { + sourceBuilder().version(version); + return this; + } + + /** + * Add a stored field to be loaded and returned with the inner hit. + */ + public InnerHit field(String name) { + sourceBuilder().field(name); + return this; + } + + /** + * Sets no fields to be loaded, resulting in only id and type to be returned per field. 
+ */ + public InnerHit setNoFields() { + sourceBuilder().noFields(); + return this; + } + + /** + * Indicates whether the response should contain the stored _source for every hit + */ + public InnerHit setFetchSource(boolean fetch) { + sourceBuilder().fetchSource(fetch); + return this; + } + + /** + * Indicate that _source should be returned with every hit, with an "include" and/or "exclude" set which can include simple wildcard + * elements. + * + * @param include An optional include (optionally wildcarded) pattern to filter the returned _source + * @param exclude An optional exclude (optionally wildcarded) pattern to filter the returned _source + */ + public InnerHit setFetchSource(@Nullable String include, @Nullable String exclude) { + sourceBuilder().fetchSource(include, exclude); + return this; + } + + /** + * Indicate that _source should be returned with every hit, with an "include" and/or "exclude" set which can include simple wildcard + * elements. + * + * @param includes An optional list of include (optionally wildcarded) pattern to filter the returned _source + * @param excludes An optional list of exclude (optionally wildcarded) pattern to filter the returned _source + */ + public InnerHit setFetchSource(@Nullable String[] includes, @Nullable String[] excludes) { + sourceBuilder().fetchSource(includes, excludes); + return this; + } + + /** + * Adds a field data based field to load and return. The field does not have to be stored, + * but its recommended to use non analyzed or numeric fields. + * + * @param name The field to get from the field data cache + */ + public InnerHit addFieldDataField(String name) { + sourceBuilder().fieldDataField(name); + return this; + } + + /** + * Adds a script based field to load and return. The field does not have to be stored, + * but its recommended to use non analyzed or numeric fields. 
+ * + * @param name The name that will represent this value in the return hit + * @param script The script to use + */ + public InnerHit addScriptField(String name, Script script) { + sourceBuilder().scriptField(name, script); + return this; + } + + /** + * Adds a sort against the given field name and the sort ordering. + * + * @param field The name of the field + * @param order The sort ordering + */ + public InnerHit addSort(String field, SortOrder order) { + sourceBuilder().sort(field, order); + return this; + } + + /** + * Adds a generic sort builder. + * + * @see org.elasticsearch.search.sort.SortBuilders + */ + public InnerHit addSort(SortBuilder sort) { + sourceBuilder().sort(sort); + return this; + } + + public HighlightBuilder highlightBuilder() { + return sourceBuilder().highlighter(); + } + + /** + * Adds a field to be highlighted with default fragment size of 100 characters, and + * default number of fragments of 5. + * + * @param name The field to highlight + */ + public InnerHit addHighlightedField(String name) { + highlightBuilder().field(name); + return this; + } + + + /** + * Adds a field to be highlighted with a provided fragment size (in characters), and + * default number of fragments of 5. + * + * @param name The field to highlight + * @param fragmentSize The size of a fragment in characters + */ + public InnerHit addHighlightedField(String name, int fragmentSize) { + highlightBuilder().field(name, fragmentSize); + return this; + } + + /** + * Adds a field to be highlighted with a provided fragment size (in characters), and + * a provided (maximum) number of fragments. 
+ * + * @param name The field to highlight + * @param fragmentSize The size of a fragment in characters + * @param numberOfFragments The (maximum) number of fragments + */ + public InnerHit addHighlightedField(String name, int fragmentSize, int numberOfFragments) { + highlightBuilder().field(name, fragmentSize, numberOfFragments); + return this; + } + + /** + * Adds a field to be highlighted with a provided fragment size (in characters), + * a provided (maximum) number of fragments and an offset for the highlight. + * + * @param name The field to highlight + * @param fragmentSize The size of a fragment in characters + * @param numberOfFragments The (maximum) number of fragments + */ + public InnerHit addHighlightedField(String name, int fragmentSize, int numberOfFragments, + int fragmentOffset) { + highlightBuilder().field(name, fragmentSize, numberOfFragments, fragmentOffset); + return this; + } + + /** + * Adds a highlighted field. + */ + public InnerHit addHighlightedField(HighlightBuilder.Field field) { + highlightBuilder().field(field); + return this; + } + + /** + * Set a tag scheme that encapsulates a built in pre and post tags. The allows schemes + * are styled and default. 
+ * + * @param schemaName The tag scheme name + */ + public InnerHit setHighlighterTagsSchema(String schemaName) { + highlightBuilder().tagsSchema(schemaName); + return this; + } + + public InnerHit setHighlighterFragmentSize(Integer fragmentSize) { + highlightBuilder().fragmentSize(fragmentSize); + return this; + } + + public InnerHit setHighlighterNumOfFragments(Integer numOfFragments) { + highlightBuilder().numOfFragments(numOfFragments); + return this; + } + + public InnerHit setHighlighterFilter(Boolean highlightFilter) { + highlightBuilder().highlightFilter(highlightFilter); + return this; + } + + /** + * The encoder to set for highlighting + */ + public InnerHit setHighlighterEncoder(String encoder) { + highlightBuilder().encoder(encoder); + return this; + } + + /** + * Explicitly set the pre tags that will be used for highlighting. + */ + public InnerHit setHighlighterPreTags(String... preTags) { + highlightBuilder().preTags(preTags); + return this; + } + + /** + * Explicitly set the post tags that will be used for highlighting. + */ + public InnerHit setHighlighterPostTags(String... postTags) { + highlightBuilder().postTags(postTags); + return this; + } + + /** + * The order of fragments per field. By default, ordered by the order in the + * highlighted text. Can be score, which then it will be ordered + * by score of the fragments. + */ + public InnerHit setHighlighterOrder(String order) { + highlightBuilder().order(order); + return this; + } + + public InnerHit setHighlighterRequireFieldMatch(boolean requireFieldMatch) { + highlightBuilder().requireFieldMatch(requireFieldMatch); + return this; + } + + public InnerHit setHighlighterBoundaryMaxScan(Integer boundaryMaxScan) { + highlightBuilder().boundaryMaxScan(boundaryMaxScan); + return this; + } + + public InnerHit setHighlighterBoundaryChars(char[] boundaryChars) { + highlightBuilder().boundaryChars(boundaryChars); + return this; + } + + /** + * The highlighter type to use. 
+ */ + public InnerHit setHighlighterType(String type) { + highlightBuilder().highlighterType(type); + return this; + } + + public InnerHit setHighlighterFragmenter(String fragmenter) { + highlightBuilder().fragmenter(fragmenter); + return this; + } + + /** + * Sets a query to be used for highlighting all fields instead of the search query. + */ + public InnerHit setHighlighterQuery(QueryBuilder highlightQuery) { + highlightBuilder().highlightQuery(highlightQuery); + return this; + } + + /** + * Sets the size of the fragment to return from the beginning of the field if there are no matches to + * highlight and the field doesn't also define noMatchSize. + * + * @param noMatchSize integer to set or null to leave out of request. default is null. + * @return this builder for chaining + */ + public InnerHit setHighlighterNoMatchSize(Integer noMatchSize) { + highlightBuilder().noMatchSize(noMatchSize); + return this; + } + + /** + * Sets the maximum number of phrases the fvh will consider if the field doesn't also define phraseLimit. + */ + public InnerHit setHighlighterPhraseLimit(Integer phraseLimit) { + highlightBuilder().phraseLimit(phraseLimit); + return this; + } + + public InnerHit setHighlighterOptions(Map options) { + highlightBuilder().options(options); + return this; + } + + protected SearchSourceBuilder sourceBuilder() { + if (sourceBuilder == null) { + sourceBuilder = new SearchSourceBuilder(); + } + return sourceBuilder; + } + /** * Sets the query to run for collecting the inner hits. */ @@ -62,19 +460,15 @@ public class InnerHitsBuilder implements ToXContent { return this; } - /** - * For parent/child inner hits the type to collect inner hits for. - */ - public InnerHit setPath(String path) { - this.path = path; - return this; - } + + /** - * For nested inner hits the path to collect child nested docs for. + * Adds a nested inner hit definition that collects inner hits for hits + * on this inner hit level. 
*/ - public InnerHit setType(String type) { - this.type = type; + public InnerHit addNestedInnerHits(String name, String path, InnerHit innerHit) { + sourceBuilder().innerHitsBuilder().addNestedInnerHits(name, path, innerHit); return this; } @@ -82,21 +476,17 @@ public class InnerHitsBuilder implements ToXContent { * Adds a nested inner hit definition that collects inner hits for hits * on this inner hit level. */ - public InnerHit addInnerHit(String name, InnerHit innerHit) { - sourceBuilder().innerHitsBuilder().addInnerHit(name, innerHit); + public InnerHit addParentChildInnerHits(String name, String type, InnerHit innerHit) { + sourceBuilder().innerHitsBuilder().addParentChildInnerHits(name, type, innerHit); return this; } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - if (path != null) { - builder.startObject("path").startObject(path); - } else { - builder.startObject("type").startObject(type); + if (sourceBuilder != null) { + sourceBuilder.innerToXContent(builder, params); } - super.toXContent(builder, params); - return builder.endObject().endObject(); + return builder; } } - } diff --git a/core/src/main/java/org/elasticsearch/search/fetch/innerhits/InnerHitsParseElement.java b/core/src/main/java/org/elasticsearch/search/fetch/innerhits/InnerHitsParseElement.java index c02e2c6d8ed..ac6dc18d7d4 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/innerhits/InnerHitsParseElement.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/innerhits/InnerHitsParseElement.java @@ -24,7 +24,7 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.object.ObjectMapper; import org.elasticsearch.index.query.ParsedQuery; -import org.elasticsearch.index.query.QueryParseContext; +import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.search.SearchParseElement; import 
org.elasticsearch.search.fetch.fielddata.FieldDataFieldsParseElement; import org.elasticsearch.search.fetch.script.ScriptFieldsParseElement; @@ -59,15 +59,15 @@ public class InnerHitsParseElement implements SearchParseElement { @Override public void parse(XContentParser parser, SearchContext searchContext) throws Exception { - QueryParseContext parseContext = searchContext.queryParserService().getParseContext(); - parseContext.reset(parser); - Map innerHitsMap = parseInnerHits(parser, parseContext, searchContext); + QueryShardContext context = searchContext.queryParserService().getShardContext(); + context.reset(parser); + Map innerHitsMap = parseInnerHits(parser, context, searchContext); if (innerHitsMap != null) { searchContext.innerHits(new InnerHitsContext(innerHitsMap)); } } - private Map parseInnerHits(XContentParser parser, QueryParseContext parseContext, SearchContext searchContext) throws Exception { + private Map parseInnerHits(XContentParser parser, QueryShardContext context, SearchContext searchContext) throws Exception { XContentParser.Token token; Map innerHitsMap = null; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { @@ -79,7 +79,7 @@ public class InnerHitsParseElement implements SearchParseElement { if (token != XContentParser.Token.START_OBJECT) { throw new IllegalArgumentException("Inner hit definition for [" + innerHitName + " starts with a [" + token + "], expected a [" + XContentParser.Token.START_OBJECT + "]."); } - InnerHitsContext.BaseInnerHits innerHits = parseInnerHit(parser, parseContext, searchContext, innerHitName); + InnerHitsContext.BaseInnerHits innerHits = parseInnerHit(parser, context, searchContext, innerHitName); if (innerHitsMap == null) { innerHitsMap = new HashMap<>(); } @@ -88,7 +88,7 @@ public class InnerHitsParseElement implements SearchParseElement { return innerHitsMap; } - private InnerHitsContext.BaseInnerHits parseInnerHit(XContentParser parser, QueryParseContext parseContext, SearchContext 
searchContext, String innerHitName) throws Exception { + private InnerHitsContext.BaseInnerHits parseInnerHit(XContentParser parser, QueryShardContext context, SearchContext searchContext, String innerHitName) throws Exception { XContentParser.Token token = parser.nextToken(); if (token != XContentParser.Token.FIELD_NAME) { throw new IllegalArgumentException("Unexpected token " + token + " inside inner hit definition. Either specify [path] or [type] object"); @@ -123,9 +123,9 @@ public class InnerHitsParseElement implements SearchParseElement { final InnerHitsContext.BaseInnerHits innerHits; if (nestedPath != null) { - innerHits = parseNested(parser, parseContext, searchContext, fieldName); + innerHits = parseNested(parser, context, searchContext, fieldName); } else if (type != null) { - innerHits = parseParentChild(parser, parseContext, searchContext, fieldName); + innerHits = parseParentChild(parser, context, searchContext, fieldName); } else { throw new IllegalArgumentException("Either [path] or [type] must be defined"); } @@ -143,16 +143,16 @@ public class InnerHitsParseElement implements SearchParseElement { return innerHits; } - private InnerHitsContext.ParentChildInnerHits parseParentChild(XContentParser parser, QueryParseContext parseContext, SearchContext searchContext, String type) throws Exception { - ParseResult parseResult = parseSubSearchContext(searchContext, parseContext, parser); + private InnerHitsContext.ParentChildInnerHits parseParentChild(XContentParser parser, QueryShardContext context, SearchContext searchContext, String type) throws Exception { + ParseResult parseResult = parseSubSearchContext(searchContext, context, parser); DocumentMapper documentMapper = searchContext.mapperService().documentMapper(type); if (documentMapper == null) { throw new IllegalArgumentException("type [" + type + "] doesn't exist"); } - return new InnerHitsContext.ParentChildInnerHits(parseResult.context(), parseResult.query(), parseResult.childInnerHits(), 
parseContext.mapperService(), documentMapper); + return new InnerHitsContext.ParentChildInnerHits(parseResult.context(), parseResult.query(), parseResult.childInnerHits(), context.mapperService(), documentMapper); } - private InnerHitsContext.NestedInnerHits parseNested(XContentParser parser, QueryParseContext parseContext, SearchContext searchContext, String nestedPath) throws Exception { + private InnerHitsContext.NestedInnerHits parseNested(XContentParser parser, QueryShardContext context, SearchContext searchContext, String nestedPath) throws Exception { ObjectMapper objectMapper = searchContext.getObjectMapper(nestedPath); if (objectMapper == null) { throw new IllegalArgumentException("path [" + nestedPath +"] doesn't exist"); @@ -160,14 +160,14 @@ public class InnerHitsParseElement implements SearchParseElement { if (objectMapper.nested().isNested() == false) { throw new IllegalArgumentException("path [" + nestedPath +"] isn't nested"); } - ObjectMapper parentObjectMapper = parseContext.nestedScope().nextLevel(objectMapper); - ParseResult parseResult = parseSubSearchContext(searchContext, parseContext, parser); - parseContext.nestedScope().previousLevel(); + ObjectMapper parentObjectMapper = context.nestedScope().nextLevel(objectMapper); + ParseResult parseResult = parseSubSearchContext(searchContext, context, parser); + context.nestedScope().previousLevel(); return new InnerHitsContext.NestedInnerHits(parseResult.context(), parseResult.query(), parseResult.childInnerHits(), parentObjectMapper, objectMapper); } - private ParseResult parseSubSearchContext(SearchContext searchContext, QueryParseContext parseContext, XContentParser parser) throws Exception { + private ParseResult parseSubSearchContext(SearchContext searchContext, QueryShardContext context, XContentParser parser) throws Exception { ParsedQuery query = null; Map childInnerHits = null; SubSearchContext subSearchContext = new SubSearchContext(searchContext); @@ -178,10 +178,10 @@ public class 
InnerHitsParseElement implements SearchParseElement { fieldName = parser.currentName(); } else if (token == XContentParser.Token.START_OBJECT) { if ("query".equals(fieldName)) { - Query q = searchContext.queryParserService().parseInnerQuery(parseContext); - query = new ParsedQuery(q, parseContext.copyNamedQueries()); + Query q = searchContext.queryParserService().parseInnerQuery(context); + query = new ParsedQuery(q, context.copyNamedQueries()); } else if ("inner_hits".equals(fieldName)) { - childInnerHits = parseInnerHits(parser, parseContext, searchContext); + childInnerHits = parseInnerHits(parser, context, searchContext); } else { parseCommonInnerHitOptions(parser, token, fieldName, subSearchContext, sortParseElement, sourceParseElement, highlighterParseElement, scriptFieldsParseElement, fieldDataFieldsParseElement); } diff --git a/core/src/main/java/org/elasticsearch/search/internal/SearchContext.java b/core/src/main/java/org/elasticsearch/search/internal/SearchContext.java index 6402d5e53d8..73665a54238 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/SearchContext.java +++ b/core/src/main/java/org/elasticsearch/search/internal/SearchContext.java @@ -41,7 +41,7 @@ import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.object.ObjectMapper; import org.elasticsearch.index.query.IndexQueryParserService; import org.elasticsearch.index.query.ParsedQuery; -import org.elasticsearch.index.query.QueryParseContext; +import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.similarity.SimilarityService; import org.elasticsearch.script.ScriptService; @@ -70,12 +70,12 @@ public abstract class SearchContext extends DelegatingHasContextAndHeaders imple public static void setCurrent(SearchContext value) { current.set(value); - QueryParseContext.setTypes(value.types()); + QueryShardContext.setTypes(value.types()); } public static void 
removeCurrent() { current.remove(); - QueryParseContext.removeTypes(); + QueryShardContext.removeTypes(); } public static SearchContext current() { diff --git a/core/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortParser.java b/core/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortParser.java index f10db634b23..405e2cc7e23 100644 --- a/core/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortParser.java +++ b/core/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortParser.java @@ -170,7 +170,6 @@ public class GeoDistanceSortParser implements SortParser { final Nested nested; if (nestedHelper != null && nestedHelper.getPath() != null) { - BitSetProducer rootDocumentsFilter = context.bitsetFilterCache().getBitSetProducer(Queries.newNonNestedFilter()); Filter innerDocumentsFilter; if (nestedHelper.filterFound()) { diff --git a/core/src/test/java/org/apache/lucene/queries/BlendedTermQueryTests.java b/core/src/test/java/org/apache/lucene/queries/BlendedTermQueryTests.java index 7f729c75d2b..91eaeb2607b 100644 --- a/core/src/test/java/org/apache/lucene/queries/BlendedTermQueryTests.java +++ b/core/src/test/java/org/apache/lucene/queries/BlendedTermQueryTests.java @@ -23,19 +23,8 @@ import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.document.FieldType; import org.apache.lucene.document.TextField; -import org.apache.lucene.index.DirectoryReader; -import org.apache.lucene.index.IndexOptions; -import org.apache.lucene.index.IndexWriter; -import org.apache.lucene.index.MultiReader; -import org.apache.lucene.index.Term; -import org.apache.lucene.search.BooleanClause; -import org.apache.lucene.search.BooleanQuery; -import org.apache.lucene.search.DisjunctionMaxQuery; -import org.apache.lucene.search.IndexSearcher; -import org.apache.lucene.search.QueryUtils; -import org.apache.lucene.search.ScoreDoc; -import org.apache.lucene.search.TermQuery; -import org.apache.lucene.search.TopDocs; 
+import org.apache.lucene.index.*; +import org.apache.lucene.search.*; import org.apache.lucene.search.similarities.BM25Similarity; import org.apache.lucene.search.similarities.DefaultSimilarity; import org.apache.lucene.search.similarities.Similarity; @@ -45,11 +34,7 @@ import org.elasticsearch.test.ESTestCase; import org.junit.Test; import java.io.IOException; -import java.util.Arrays; -import java.util.Collections; -import java.util.HashSet; -import java.util.List; -import java.util.Set; +import java.util.*; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.equalTo; diff --git a/core/src/test/java/org/elasticsearch/ESExceptionTests.java b/core/src/test/java/org/elasticsearch/ESExceptionTests.java index c6895993ce9..5bb3bf4c13a 100644 --- a/core/src/test/java/org/elasticsearch/ESExceptionTests.java +++ b/core/src/test/java/org/elasticsearch/ESExceptionTests.java @@ -37,7 +37,7 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentLocation; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNotFoundException; -import org.elasticsearch.index.query.TestParsingException; +import org.elasticsearch.index.query.*; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.SearchParseException; import org.elasticsearch.search.SearchShardTarget; @@ -87,9 +87,9 @@ public class ESExceptionTests extends ESTestCase { assertEquals(rootCauses.length, 1); assertEquals(ElasticsearchException.getExceptionName(rootCauses[0]), "index_not_found_exception"); assertEquals(rootCauses[0].getMessage(), "no such index"); - ShardSearchFailure failure = new ShardSearchFailure(new TestParsingException(new Index("foo"), "foobar", null), + ShardSearchFailure failure = new ShardSearchFailure(new ParsingException(1, 2, "foobar", null), new SearchShardTarget("node_1", "foo", 1)); - ShardSearchFailure failure1 = new ShardSearchFailure(new TestParsingException(new 
Index("foo"), "foobar", null), + ShardSearchFailure failure1 = new ShardSearchFailure(new ParsingException(1, 2, "foobar", null), new SearchShardTarget("node_1", "foo", 2)); SearchPhaseExecutionException ex = new SearchPhaseExecutionException("search", "all shards failed", new ShardSearchFailure[]{failure, failure1}); if (randomBoolean()) { @@ -97,7 +97,7 @@ public class ESExceptionTests extends ESTestCase { } else { rootCauses = ElasticsearchException.guessRootCauses(randomBoolean() ? new RemoteTransportException("remoteboom", ex) : ex); } - assertEquals(ElasticsearchException.getExceptionName(rootCauses[0]), "test_parsing_exception"); + assertEquals(ElasticsearchException.getExceptionName(rootCauses[0]), "parsing_exception"); assertEquals(rootCauses[0].getMessage(), "foobar"); ElasticsearchException oneLevel = new ElasticsearchException("foo", new RuntimeException("foobar")); @@ -107,23 +107,22 @@ public class ESExceptionTests extends ESTestCase { } { ShardSearchFailure failure = new ShardSearchFailure( - new TestParsingException(new Index("foo"), 1, 2, "foobar", null), + new ParsingException(1, 2, "foobar", null), new SearchShardTarget("node_1", "foo", 1)); - ShardSearchFailure failure1 = new ShardSearchFailure(new TestParsingException(new Index("foo1"), 1, 2, "foobar", null), + ShardSearchFailure failure1 = new ShardSearchFailure(new QueryShardException(new Index("foo1"), "foobar", null), new SearchShardTarget("node_1", "foo1", 1)); - ShardSearchFailure failure2 = new ShardSearchFailure(new TestParsingException(new Index("foo1"), 1, 2, "foobar", null), + ShardSearchFailure failure2 = new ShardSearchFailure(new QueryShardException(new Index("foo1"), "foobar", null), new SearchShardTarget("node_1", "foo1", 2)); SearchPhaseExecutionException ex = new SearchPhaseExecutionException("search", "all shards failed", new ShardSearchFailure[]{failure, failure1, failure2}); final ElasticsearchException[] rootCauses = ex.guessRootCauses(); assertEquals(rootCauses.length, 
2); - assertEquals(ElasticsearchException.getExceptionName(rootCauses[0]), "test_parsing_exception"); + assertEquals(ElasticsearchException.getExceptionName(rootCauses[0]), "parsing_exception"); assertEquals(rootCauses[0].getMessage(), "foobar"); - assertEquals(((ParsingException)rootCauses[0]).getIndex(), "foo"); - assertEquals(ElasticsearchException.getExceptionName(rootCauses[1]), "test_parsing_exception"); + assertEquals(((ParsingException) rootCauses[0]).getLineNumber(), 1); + assertEquals(((ParsingException) rootCauses[0]).getColumnNumber(), 2); + assertEquals(ElasticsearchException.getExceptionName(rootCauses[1]), "query_shard_exception"); + assertEquals((rootCauses[1]).getIndex(), "foo1"); assertEquals(rootCauses[1].getMessage(), "foobar"); - assertEquals(((ParsingException) rootCauses[1]).getLineNumber(), 1); - assertEquals(((ParsingException) rootCauses[1]).getColumnNumber(), 2); - } { @@ -139,31 +138,31 @@ public class ESExceptionTests extends ESTestCase { public void testDeduplicate() throws IOException { { - ShardSearchFailure failure = new ShardSearchFailure(new TestParsingException(new Index("foo"), "foobar", null), + ShardSearchFailure failure = new ShardSearchFailure(new ParsingException(1, 2, "foobar", null), new SearchShardTarget("node_1", "foo", 1)); - ShardSearchFailure failure1 = new ShardSearchFailure(new TestParsingException(new Index("foo"), "foobar", null), + ShardSearchFailure failure1 = new ShardSearchFailure(new ParsingException(1, 2, "foobar", null), new SearchShardTarget("node_1", "foo", 2)); SearchPhaseExecutionException ex = new SearchPhaseExecutionException("search", "all shards failed", new ShardSearchFailure[]{failure, failure1}); XContentBuilder builder = XContentFactory.jsonBuilder(); builder.startObject(); ex.toXContent(builder, PARAMS); builder.endObject(); - String expected = "{\"type\":\"search_phase_execution_exception\",\"reason\":\"all shards 
failed\",\"phase\":\"search\",\"grouped\":true,\"failed_shards\":[{\"shard\":1,\"index\":\"foo\",\"node\":\"node_1\",\"reason\":{\"type\":\"test_parsing_exception\",\"reason\":\"foobar\",\"index\":\"foo\"}}]}"; + String expected = "{\"type\":\"search_phase_execution_exception\",\"reason\":\"all shards failed\",\"phase\":\"search\",\"grouped\":true,\"failed_shards\":[{\"shard\":1,\"index\":\"foo\",\"node\":\"node_1\",\"reason\":{\"type\":\"parsing_exception\",\"reason\":\"foobar\",\"line\":1,\"col\":2}}]}"; assertEquals(expected, builder.string()); } { - ShardSearchFailure failure = new ShardSearchFailure(new TestParsingException(new Index("foo"), "foobar", null), + ShardSearchFailure failure = new ShardSearchFailure(new ParsingException(1, 2, "foobar", null), new SearchShardTarget("node_1", "foo", 1)); - ShardSearchFailure failure1 = new ShardSearchFailure(new TestParsingException(new Index("foo1"), "foobar", null), + ShardSearchFailure failure1 = new ShardSearchFailure(new QueryShardException(new Index("foo1"), "foobar", null), new SearchShardTarget("node_1", "foo1", 1)); - ShardSearchFailure failure2 = new ShardSearchFailure(new TestParsingException(new Index("foo1"), "foobar", null), + ShardSearchFailure failure2 = new ShardSearchFailure(new QueryShardException(new Index("foo1"), "foobar", null), new SearchShardTarget("node_1", "foo1", 2)); SearchPhaseExecutionException ex = new SearchPhaseExecutionException("search", "all shards failed", new ShardSearchFailure[]{failure, failure1, failure2}); XContentBuilder builder = XContentFactory.jsonBuilder(); builder.startObject(); ex.toXContent(builder, PARAMS); builder.endObject(); - String expected = "{\"type\":\"search_phase_execution_exception\",\"reason\":\"all shards 
failed\",\"phase\":\"search\",\"grouped\":true,\"failed_shards\":[{\"shard\":1,\"index\":\"foo\",\"node\":\"node_1\",\"reason\":{\"type\":\"test_parsing_exception\",\"reason\":\"foobar\",\"index\":\"foo\"}},{\"shard\":1,\"index\":\"foo1\",\"node\":\"node_1\",\"reason\":{\"type\":\"test_parsing_exception\",\"reason\":\"foobar\",\"index\":\"foo1\"}}]}"; + String expected = "{\"type\":\"search_phase_execution_exception\",\"reason\":\"all shards failed\",\"phase\":\"search\",\"grouped\":true,\"failed_shards\":[{\"shard\":1,\"index\":\"foo\",\"node\":\"node_1\",\"reason\":{\"type\":\"parsing_exception\",\"reason\":\"foobar\",\"line\":1,\"col\":2}},{\"shard\":1,\"index\":\"foo1\",\"node\":\"node_1\",\"reason\":{\"type\":\"query_shard_exception\",\"reason\":\"foobar\",\"index\":\"foo1\"}}]}"; assertEquals(expected, builder.string()); } } @@ -219,12 +218,12 @@ public class ESExceptionTests extends ESTestCase { } { - ParsingException ex = new TestParsingException(new Index("foo"), 1, 2, "foobar", null); + ParsingException ex = new ParsingException(1, 2, "foobar", null); XContentBuilder builder = XContentFactory.jsonBuilder(); builder.startObject(); ElasticsearchException.toXContent(builder, PARAMS, ex); builder.endObject(); - String expected = "{\"type\":\"test_parsing_exception\",\"reason\":\"foobar\",\"index\":\"foo\",\"line\":1,\"col\":2}"; + String expected = "{\"type\":\"parsing_exception\",\"reason\":\"foobar\",\"line\":1,\"col\":2}"; assertEquals(expected, builder.string()); } @@ -245,7 +244,7 @@ public class ESExceptionTests extends ESTestCase { } { // render header - ParsingException ex = new TestParsingException(new Index("foo"), 1, 2, "foobar", null); + ParsingException ex = new ParsingException(1, 2, "foobar", null); ex.addHeader("test", "some value"); ex.addHeader("test_multi", "some value", "another value"); XContentBuilder builder = XContentFactory.jsonBuilder(); @@ -253,15 +252,15 @@ public class ESExceptionTests extends ESTestCase { 
ElasticsearchException.toXContent(builder, PARAMS, ex); builder.endObject(); assertThat(builder.string(), Matchers.anyOf( // iteration order depends on platform - equalTo("{\"type\":\"test_parsing_exception\",\"reason\":\"foobar\",\"index\":\"foo\",\"line\":1,\"col\":2,\"header\":{\"test_multi\":[\"some value\",\"another value\"],\"test\":\"some value\"}}"), - equalTo("{\"type\":\"test_parsing_exception\",\"reason\":\"foobar\",\"index\":\"foo\",\"line\":1,\"col\":2,\"header\":{\"test\":\"some value\",\"test_multi\":[\"some value\",\"another value\"]}}") + equalTo("{\"type\":\"parsing_exception\",\"reason\":\"foobar\",\"line\":1,\"col\":2,\"header\":{\"test_multi\":[\"some value\",\"another value\"],\"test\":\"some value\"}}"), + equalTo("{\"type\":\"parsing_exception\",\"reason\":\"foobar\",\"line\":1,\"col\":2,\"header\":{\"test\":\"some value\",\"test_multi\":[\"some value\",\"another value\"]}}") )); } } public void testSerializeElasticsearchException() throws IOException { BytesStreamOutput out = new BytesStreamOutput(); - ParsingException ex = new ParsingException(new Index("foo"), 1, 2, "foobar", null); + ParsingException ex = new ParsingException(1, 2, "foobar", null); out.writeThrowable(ex); StreamInput in = StreamInput.wrap(out.bytes()); @@ -274,8 +273,8 @@ public class ESExceptionTests extends ESTestCase { public void testSerializeUnknownException() throws IOException { BytesStreamOutput out = new BytesStreamOutput(); - ParsingException parsingException = new ParsingException(new Index("foo"), 1, 2, "foobar", null); - Throwable ex = new Throwable("wtf", parsingException); + ParsingException ParsingException = new ParsingException(1, 2, "foobar", null); + Throwable ex = new Throwable("wtf", ParsingException); out.writeThrowable(ex); StreamInput in = StreamInput.wrap(out.bytes()); @@ -283,10 +282,10 @@ public class ESExceptionTests extends ESTestCase { assertEquals("wtf", throwable.getMessage()); assertTrue(throwable instanceof ElasticsearchException); 
ParsingException e = (ParsingException)throwable.getCause(); - assertEquals(parsingException.getIndex(), e.getIndex()); - assertEquals(parsingException.getMessage(), e.getMessage()); - assertEquals(parsingException.getLineNumber(), e.getLineNumber()); - assertEquals(parsingException.getColumnNumber(), e.getColumnNumber()); + assertEquals(ParsingException.getIndex(), e.getIndex()); + assertEquals(ParsingException.getMessage(), e.getMessage()); + assertEquals(ParsingException.getLineNumber(), e.getLineNumber()); + assertEquals(ParsingException.getColumnNumber(), e.getColumnNumber()); } public void testWriteThrowable() throws IOException { @@ -309,7 +308,7 @@ new OutOfMemoryError("no memory left"), new AlreadyClosedException("closed!!", new NullPointerException()), new LockObtainFailedException("can't lock directory", new NullPointerException()), - new Throwable("this exception is unknown", new ParsingException(new Index("foo"), 1, 2, "foobar", null) ), // somethin unknown + new Throwable("this exception is unknown", new QueryShardException(new Index("foo"), "foobar", null) ), // somethin unknown }; for (Throwable t : causes) { BytesStreamOutput out = new BytesStreamOutput(); diff --git a/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java b/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java index f62f50e206f..d9b25d82322 100644 --- a/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java +++ b/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java @@ -20,7 +20,6 @@ package org.elasticsearch; import com.fasterxml.jackson.core.JsonLocation; import com.fasterxml.jackson.core.JsonParseException; - import org.apache.lucene.util.Constants; import org.codehaus.groovy.runtime.typehandling.GroovyCastException; import org.elasticsearch.action.FailedNodeException; @@ -60,7 +59,8 @@ import org.elasticsearch.index.engine.CreateFailedEngineException; import
org.elasticsearch.index.engine.IndexFailedEngineException; import org.elasticsearch.index.engine.RecoveryEngineException; import org.elasticsearch.index.mapper.MergeMappingException; -import org.elasticsearch.index.query.TestParsingException; +import org.elasticsearch.common.ParsingException; +import org.elasticsearch.index.query.QueryShardException; import org.elasticsearch.index.shard.IllegalIndexShardStateException; import org.elasticsearch.index.shard.IndexShardState; import org.elasticsearch.index.shard.ShardId; @@ -112,7 +112,6 @@ public class ExceptionSerializationTests extends ESTestCase { final Path startPath = PathUtils.get(ElasticsearchException.class.getProtectionDomain().getCodeSource().getLocation().toURI()).resolve("org").resolve("elasticsearch"); final Set ignore = Sets.newHashSet( org.elasticsearch.test.rest.parser.RestTestParseException.class, - TestParsingException.class, org.elasticsearch.test.rest.client.RestException.class, CancellableThreadsTests.CustomException.class, org.elasticsearch.rest.BytesRestResponseTests.WithHeadersException.class, @@ -228,19 +227,29 @@ } public void testParsingException() throws IOException { - ParsingException ex = serialize(new ParsingException(new Index("foo"), 1, 2, "fobar", null)); - assertEquals(ex.getIndex(), "foo"); + ParsingException ex = serialize(new ParsingException(1, 2, "fobar", null)); + assertNull(ex.getIndex()); assertEquals(ex.getMessage(), "fobar"); assertEquals(ex.getLineNumber(),1); assertEquals(ex.getColumnNumber(), 2); - ex = serialize(new ParsingException(null, 1, 2, null, null)); + ex = serialize(new ParsingException(1, 2, null, null)); assertNull(ex.getIndex()); assertNull(ex.getMessage()); assertEquals(ex.getLineNumber(),1); assertEquals(ex.getColumnNumber(), 2); } + public void testQueryShardException() throws IOException { + QueryShardException ex = serialize(new QueryShardException(new
Index("foo"), "fobar", null)); + assertEquals(ex.getIndex(), "foo"); + assertEquals(ex.getMessage(), "fobar"); + + ex = serialize(new QueryShardException((Index)null, null, null)); + assertNull(ex.getIndex()); + assertNull(ex.getMessage()); + } + public void testSearchException() throws IOException { SearchShardTarget target = new SearchShardTarget("foo", "bar", 1); SearchException ex = serialize(new SearchException(target, "hello world")); @@ -635,14 +644,4 @@ InterruptedException ex = serialize(orig); assertEquals(orig.getMessage(), ex.getMessage()); } - - public static class UnknownException extends Exception { - public UnknownException(String message) { - super(message); - } - - public UnknownException(String message, Throwable cause) { - super(message, cause); - } - } } diff --git a/core/src/test/java/org/elasticsearch/aliases/IndexAliasesIT.java b/core/src/test/java/org/elasticsearch/aliases/IndexAliasesIT.java index ccde0fac8f7..6bbec12dc12 100644 --- a/core/src/test/java/org/elasticsearch/aliases/IndexAliasesIT.java +++ b/core/src/test/java/org/elasticsearch/aliases/IndexAliasesIT.java @@ -149,7 +149,7 @@ public class IndexAliasesIT extends ESIntegTestCase { logger.info("--> making sure that filter was stored with alias [alias1] and filter [user:kimchy]"); ClusterState clusterState = admin().cluster().prepareState().get().getState(); IndexMetaData indexMd = clusterState.metaData().index("test"); - assertThat(indexMd.aliases().get("alias1").filter().string(), equalTo("{\"term\":{\"user\":\"kimchy\"}}")); + assertThat(indexMd.aliases().get("alias1").filter().string(), equalTo("{\"term\":{\"user\":{\"value\":\"kimchy\",\"boost\":1.0}}}")); } @@ -411,8 +411,8 @@ public class IndexAliasesIT extends ESIntegTestCase { assertThat(client().prepareCount("bars").setQuery(QueryBuilders.matchAllQuery()).get().getCount(), equalTo(1L)); } - - + + @Test public void testDeleteAliases() throws Exception {
logger.info("--> creating index [test1] and [test2]"); @@ -432,17 +432,17 @@ public class IndexAliasesIT extends ESIntegTestCase { .addAlias("test2", "aliasToTests") .addAlias("test2", "foos", termQuery("name", "foo")) .addAlias("test2", "tests", termQuery("name", "test"))); - - String[] indices = {"test1", "test2"}; + + String[] indices = {"test1", "test2"}; String[] aliases = {"aliasToTest1", "foos", "bars", "tests", "aliasToTest2", "aliasToTests"}; - + admin().indices().prepareAliases().removeAlias(indices, aliases).get(); - + AliasesExistResponse response = admin().indices().prepareAliasesExist(aliases).get(); assertThat(response.exists(), equalTo(false)); } - + @Test public void testWaitForAliasCreationMultipleShards() throws Exception { logger.info("--> creating index [test]"); @@ -530,16 +530,16 @@ public class IndexAliasesIT extends ESIntegTestCase { logger.info("--> verify that filter was updated"); AliasMetaData aliasMetaData = ((AliasOrIndex.Alias) internalCluster().clusterService().state().metaData().getAliasAndIndexLookup().get("alias1")).getFirstAliasMetaData(); - assertThat(aliasMetaData.getFilter().toString(), equalTo("{\"term\":{\"name\":\"bar\"}}")); + assertThat(aliasMetaData.getFilter().toString(), equalTo("{\"term\":{\"name\":{\"value\":\"bar\",\"boost\":1.0}}}")); logger.info("--> deleting alias1"); stopWatch.start(); assertAcked((admin().indices().prepareAliases().removeAlias("test", "alias1").setTimeout(timeout))); assertThat(stopWatch.stop().lastTaskTime().millis(), lessThan(timeout.millis())); - + } - + @Test(expected = AliasesNotFoundException.class) public void testIndicesRemoveNonExistingAliasResponds404() throws Exception { logger.info("--> creating index [test]"); diff --git a/core/src/test/java/org/elasticsearch/benchmark/search/child/ChildSearchBenchmark.java b/core/src/test/java/org/elasticsearch/benchmark/search/child/ChildSearchBenchmark.java index bc0d1880ddb..3d22f07a52d 100644 --- 
a/core/src/test/java/org/elasticsearch/benchmark/search/child/ChildSearchBenchmark.java +++ b/core/src/test/java/org/elasticsearch/benchmark/search/child/ChildSearchBenchmark.java @@ -18,6 +18,7 @@ */ package org.elasticsearch.benchmark.search.child; +import org.apache.lucene.search.join.ScoreMode; import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; @@ -281,12 +282,12 @@ public class ChildSearchBenchmark { System.out.println("--> Running has_child query with score type"); // run parent child score query for (int j = 0; j < QUERY_WARMUP; j++) { - client.prepareSearch(indexName).setQuery(hasChildQuery("child", termQuery("field2", parentChildIndexGenerator.getQueryValue())).scoreMode("max")).execute().actionGet(); + client.prepareSearch(indexName).setQuery(hasChildQuery("child", termQuery("field2", parentChildIndexGenerator.getQueryValue())).scoreMode(ScoreMode.Max)).execute().actionGet(); } totalQueryTime = 0; for (int j = 0; j < QUERY_COUNT; j++) { - SearchResponse searchResponse = client.prepareSearch(indexName).setQuery(hasChildQuery("child", termQuery("field2", parentChildIndexGenerator.getQueryValue())).scoreMode("max")).execute().actionGet(); + SearchResponse searchResponse = client.prepareSearch(indexName).setQuery(hasChildQuery("child", termQuery("field2", parentChildIndexGenerator.getQueryValue())).scoreMode(ScoreMode.Max)).execute().actionGet(); if (j % 10 == 0) { System.out.println("--> hits [" + j + "], got [" + searchResponse.getHits().totalHits() + "]"); } @@ -296,7 +297,7 @@ public class ChildSearchBenchmark { totalQueryTime = 0; for (int j = 0; j < QUERY_COUNT; j++) { - SearchResponse searchResponse = client.prepareSearch(indexName).setQuery(hasChildQuery("child", matchAllQuery()).scoreMode("max")).execute().actionGet(); + SearchResponse searchResponse = client.prepareSearch(indexName).setQuery(hasChildQuery("child", 
matchAllQuery()).scoreMode(ScoreMode.Max)).execute().actionGet(); if (j % 10 == 0) { System.out.println("--> hits [" + j + "], got [" + searchResponse.getHits().totalHits() + "]"); } @@ -307,12 +308,12 @@ public class ChildSearchBenchmark { System.out.println("--> Running has_parent query with score type"); // run parent child score query for (int j = 0; j < QUERY_WARMUP; j++) { - client.prepareSearch(indexName).setQuery(hasParentQuery("parent", termQuery("field1", parentChildIndexGenerator.getQueryValue())).scoreMode("score")).execute().actionGet(); + client.prepareSearch(indexName).setQuery(hasParentQuery("parent", termQuery("field1", parentChildIndexGenerator.getQueryValue())).score(true)).execute().actionGet(); } totalQueryTime = 0; for (int j = 1; j < QUERY_COUNT; j++) { - SearchResponse searchResponse = client.prepareSearch(indexName).setQuery(hasParentQuery("parent", termQuery("field1", parentChildIndexGenerator.getQueryValue())).scoreMode("score")).execute().actionGet(); + SearchResponse searchResponse = client.prepareSearch(indexName).setQuery(hasParentQuery("parent", termQuery("field1", parentChildIndexGenerator.getQueryValue())).score(true)).execute().actionGet(); if (j % 10 == 0) { System.out.println("--> hits [" + j + "], got [" + searchResponse.getHits().totalHits() + "]"); } @@ -322,7 +323,7 @@ public class ChildSearchBenchmark { totalQueryTime = 0; for (int j = 1; j < QUERY_COUNT; j++) { - SearchResponse searchResponse = client.prepareSearch(indexName).setQuery(hasParentQuery("parent", matchAllQuery()).scoreMode("score")).execute().actionGet(); + SearchResponse searchResponse = client.prepareSearch(indexName).setQuery(hasParentQuery("parent", matchAllQuery()).score(true)).execute().actionGet(); if (j % 10 == 0) { System.out.println("--> hits [" + j + "], got [" + searchResponse.getHits().totalHits() + "]"); } diff --git a/core/src/test/java/org/elasticsearch/benchmark/search/child/ChildSearchShortCircuitBenchmark.java 
b/core/src/test/java/org/elasticsearch/benchmark/search/child/ChildSearchShortCircuitBenchmark.java index f7eaa743225..388bf954822 100644 --- a/core/src/test/java/org/elasticsearch/benchmark/search/child/ChildSearchShortCircuitBenchmark.java +++ b/core/src/test/java/org/elasticsearch/benchmark/search/child/ChildSearchShortCircuitBenchmark.java @@ -18,6 +18,7 @@ */ package org.elasticsearch.benchmark.search.child; +import org.apache.lucene.search.join.ScoreMode; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; import org.elasticsearch.action.bulk.BulkRequestBuilder; @@ -178,7 +179,7 @@ public class ChildSearchShortCircuitBenchmark { for (int i = 1; i < PARENT_COUNT; i *= 2) { for (int j = 0; j < QUERY_COUNT; j++) { SearchResponse searchResponse = client.prepareSearch(indexName) - .setQuery(hasChildQuery("child", matchQuery("field2", i)).scoreMode("max")) + .setQuery(hasChildQuery("child", matchQuery("field2", i)).scoreMode(ScoreMode.Max)) .execute().actionGet(); if (searchResponse.getHits().totalHits() != i) { System.err.println("--> mismatch on hits"); diff --git a/core/src/test/java/org/elasticsearch/bootstrap/BootstrapForTesting.java b/core/src/test/java/org/elasticsearch/bootstrap/BootstrapForTesting.java index 0f9664c9678..3dcd8ff0402 100644 --- a/core/src/test/java/org/elasticsearch/bootstrap/BootstrapForTesting.java +++ b/core/src/test/java/org/elasticsearch/bootstrap/BootstrapForTesting.java @@ -52,7 +52,7 @@ public class BootstrapForTesting { static { // just like bootstrap, initialize natives, then SM - Bootstrap.initializeNatives(true, true); + Bootstrap.initializeNatives(true, true, true); // initialize probes Bootstrap.initializeProbes(); diff --git a/core/src/test/java/org/elasticsearch/bootstrap/SeccompTests.java b/core/src/test/java/org/elasticsearch/bootstrap/SeccompTests.java new file mode 100644 index 00000000000..3fbaf5a60e1 --- /dev/null +++ 
b/core/src/test/java/org/elasticsearch/bootstrap/SeccompTests.java @@ -0,0 +1,73 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.bootstrap; + +import org.elasticsearch.test.ESTestCase; + +/** Simple tests seccomp filter is working. */ +public class SeccompTests extends ESTestCase { + + @Override + public void setUp() throws Exception { + super.setUp(); + assumeTrue("requires seccomp filter installation", Natives.isSeccompInstalled()); + // otherwise security manager will block the execution, no fun + assumeTrue("cannot test with security manager enabled", System.getSecurityManager() == null); + } + + public void testNoExecution() throws Exception { + try { + Runtime.getRuntime().exec("ls"); + fail("should not have been able to execute!"); + } catch (Exception expected) { + // we can't guarantee how its converted, currently its an IOException, like this: + /* + java.io.IOException: Cannot run program "ls": error=13, Permission denied + at __randomizedtesting.SeedInfo.seed([65E6C4BED11899E:FC6E1CA6AA2DB634]:0) + at java.lang.ProcessBuilder.start(ProcessBuilder.java:1048) + at java.lang.Runtime.exec(Runtime.java:620) + ... 
+ Caused by: java.io.IOException: error=13, Permission denied + at java.lang.UNIXProcess.forkAndExec(Native Method) + at java.lang.UNIXProcess.(UNIXProcess.java:248) + at java.lang.ProcessImpl.start(ProcessImpl.java:134) + at java.lang.ProcessBuilder.start(ProcessBuilder.java:1029) + ... + */ + } + } + + // make sure thread inherits this too (its documented that way) + public void testNoExecutionFromThread() throws Exception { + Thread t = new Thread() { + @Override + public void run() { + try { + Runtime.getRuntime().exec("ls"); + fail("should not have been able to execute!"); + } catch (Exception expected) { + // ok + } + } + }; + t.start(); + t.join(); + } +} diff --git a/core/src/test/java/org/elasticsearch/index/search/geo/GeoDistanceTests.java b/core/src/test/java/org/elasticsearch/common/geo/GeoDistanceTests.java similarity index 60% rename from core/src/test/java/org/elasticsearch/index/search/geo/GeoDistanceTests.java rename to core/src/test/java/org/elasticsearch/common/geo/GeoDistanceTests.java index fdb11a0cfd3..924926b9b30 100644 --- a/core/src/test/java/org/elasticsearch/index/search/geo/GeoDistanceTests.java +++ b/core/src/test/java/org/elasticsearch/common/geo/GeoDistanceTests.java @@ -16,22 +16,58 @@ * specific language governing permissions and limitations * under the License. 
*/ +package org.elasticsearch.common.geo; -package org.elasticsearch.index.search.geo; - -import org.elasticsearch.common.geo.GeoDistance; -import org.elasticsearch.common.geo.GeoPoint; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.unit.DistanceUnit; import org.elasticsearch.test.ESTestCase; import org.junit.Test; -import static org.hamcrest.MatcherAssert.assertThat; -import static org.hamcrest.Matchers.*; +import java.io.IOException; + +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.lessThan; +import static org.hamcrest.Matchers.equalTo; /** + * Basic Tests for {@link GeoDistance} */ public class GeoDistanceTests extends ESTestCase { + @Test + public void testGeoDistanceSerialization() throws IOException { + // make sure that ordinals don't change, because we rely on then in serialization + assertThat(GeoDistance.PLANE.ordinal(), equalTo(0)); + assertThat(GeoDistance.FACTOR.ordinal(), equalTo(1)); + assertThat(GeoDistance.ARC.ordinal(), equalTo(2)); + assertThat(GeoDistance.SLOPPY_ARC.ordinal(), equalTo(3)); + assertThat(GeoDistance.values().length, equalTo(4)); + + GeoDistance geoDistance = randomFrom(GeoDistance.PLANE, GeoDistance.FACTOR, GeoDistance.ARC, GeoDistance.SLOPPY_ARC); + try (BytesStreamOutput out = new BytesStreamOutput()) { + geoDistance.writeTo(out); + try (StreamInput in = StreamInput.wrap(out.bytes())) {; + GeoDistance copy = GeoDistance.readGeoDistanceFrom(in); + assertEquals(copy.toString() + " vs. 
" + geoDistance.toString(), copy, geoDistance); + } + } + } + + @Test(expected = IOException.class) + public void testInvalidReadFrom() throws Exception { + try (BytesStreamOutput out = new BytesStreamOutput()) { + if (randomBoolean()) { + out.writeVInt(randomIntBetween(GeoDistance.values().length, Integer.MAX_VALUE)); + } else { + out.writeVInt(randomIntBetween(Integer.MIN_VALUE, -1)); + } + try (StreamInput in = StreamInput.wrap(out.bytes())) { + GeoDistance.readGeoDistanceFrom(in); + } + } + } + @Test public void testDistanceCheck() { // Note, is within is an approximation, so, even though 0.52 is outside 50mi, we still get "true" diff --git a/core/src/test/java/org/elasticsearch/common/geo/ShapeRelationTests.java b/core/src/test/java/org/elasticsearch/common/geo/ShapeRelationTests.java new file mode 100644 index 00000000000..83b66719985 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/common/geo/ShapeRelationTests.java @@ -0,0 +1,92 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.common.geo; + +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.test.ESTestCase; +import org.junit.Test; + +import java.io.IOException; + +import static org.hamcrest.Matchers.equalTo; + +public class ShapeRelationTests extends ESTestCase { + + public void testValidOrdinals() { + assertThat(ShapeRelation.INTERSECTS.ordinal(), equalTo(0)); + assertThat(ShapeRelation.DISJOINT.ordinal(), equalTo(1)); + assertThat(ShapeRelation.WITHIN.ordinal(), equalTo(2)); + } + + public void testwriteTo() throws Exception { + try (BytesStreamOutput out = new BytesStreamOutput()) { + ShapeRelation.INTERSECTS.writeTo(out); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + assertThat(in.readVInt(), equalTo(0)); + } + } + + try (BytesStreamOutput out = new BytesStreamOutput()) { + ShapeRelation.DISJOINT.writeTo(out); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + assertThat(in.readVInt(), equalTo(1)); + } + } + + try (BytesStreamOutput out = new BytesStreamOutput()) { + ShapeRelation.WITHIN.writeTo(out); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + assertThat(in.readVInt(), equalTo(2)); + } + } + } + + public void testReadFrom() throws Exception { + try (BytesStreamOutput out = new BytesStreamOutput()) { + out.writeVInt(0); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + assertThat(ShapeRelation.DISJOINT.readFrom(in), equalTo(ShapeRelation.INTERSECTS)); + } + } + try (BytesStreamOutput out = new BytesStreamOutput()) { + out.writeVInt(1); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + assertThat(ShapeRelation.DISJOINT.readFrom(in), equalTo(ShapeRelation.DISJOINT)); + } + } + try (BytesStreamOutput out = new BytesStreamOutput()) { + out.writeVInt(2); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + assertThat(ShapeRelation.DISJOINT.readFrom(in), equalTo(ShapeRelation.WITHIN)); + } + } + } + + 
@Test(expected = IOException.class) + public void testInvalidReadFrom() throws Exception { + try (BytesStreamOutput out = new BytesStreamOutput()) { + out.writeVInt(randomIntBetween(3, Integer.MAX_VALUE)); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + ShapeRelation.DISJOINT.readFrom(in); + } + } + } +} diff --git a/core/src/test/java/org/elasticsearch/common/geo/SpatialStrategyTests.java b/core/src/test/java/org/elasticsearch/common/geo/SpatialStrategyTests.java new file mode 100644 index 00000000000..c53a3fb18cb --- /dev/null +++ b/core/src/test/java/org/elasticsearch/common/geo/SpatialStrategyTests.java @@ -0,0 +1,78 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.common.geo; + +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.test.ESTestCase; +import org.junit.Test; + +import java.io.IOException; + +import static org.hamcrest.Matchers.equalTo; + +public class SpatialStrategyTests extends ESTestCase { + + public void testValidOrdinals() { + assertThat(SpatialStrategy.TERM.ordinal(), equalTo(0)); + assertThat(SpatialStrategy.RECURSIVE.ordinal(), equalTo(1)); + } + + public void testwriteTo() throws Exception { + try (BytesStreamOutput out = new BytesStreamOutput()) { + SpatialStrategy.TERM.writeTo(out); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + assertThat(in.readVInt(), equalTo(0)); + } + } + + try (BytesStreamOutput out = new BytesStreamOutput()) { + SpatialStrategy.RECURSIVE.writeTo(out); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + assertThat(in.readVInt(), equalTo(1)); + } + } + } + + public void testReadFrom() throws Exception { + try (BytesStreamOutput out = new BytesStreamOutput()) { + out.writeVInt(0); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + assertThat(SpatialStrategy.TERM.readFrom(in), equalTo(SpatialStrategy.TERM)); + } + } + try (BytesStreamOutput out = new BytesStreamOutput()) { + out.writeVInt(1); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + assertThat(SpatialStrategy.TERM.readFrom(in), equalTo(SpatialStrategy.RECURSIVE)); + } + } + } + + @Test(expected = IOException.class) + public void testInvalidReadFrom() throws Exception { + try (BytesStreamOutput out = new BytesStreamOutput()) { + out.writeVInt(randomIntBetween(2, Integer.MAX_VALUE)); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + SpatialStrategy.TERM.readFrom(in); + } + } + } +} diff --git a/core/src/test/java/org/elasticsearch/common/io/stream/BytesStreamsTests.java b/core/src/test/java/org/elasticsearch/common/io/stream/BytesStreamsTests.java index 
d313dd71d81..afc17ce004b 100644 --- a/core/src/test/java/org/elasticsearch/common/io/stream/BytesStreamsTests.java +++ b/core/src/test/java/org/elasticsearch/common/io/stream/BytesStreamsTests.java @@ -26,6 +26,7 @@ import org.elasticsearch.test.ESTestCase; import org.junit.Test; import java.io.IOException; + import java.util.Objects; import static org.hamcrest.Matchers.closeTo; diff --git a/core/src/test/java/org/elasticsearch/common/path/PathTrieTests.java b/core/src/test/java/org/elasticsearch/common/path/PathTrieTests.java index d3d6fd3495c..aec4fb24888 100644 --- a/core/src/test/java/org/elasticsearch/common/path/PathTrieTests.java +++ b/core/src/test/java/org/elasticsearch/common/path/PathTrieTests.java @@ -25,6 +25,7 @@ import org.junit.Test; import java.util.HashMap; import java.util.Map; +import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.nullValue; @@ -33,7 +34,6 @@ import static org.hamcrest.Matchers.nullValue; */ public class PathTrieTests extends ESTestCase { - @Test public void testPath() { PathTrie trie = new PathTrie<>(); trie.insert("/a/b/c", "walla"); @@ -61,14 +61,12 @@ public class PathTrieTests extends ESTestCase { assertThat(params.get("docId"), equalTo("12")); } - @Test public void testEmptyPath() { PathTrie trie = new PathTrie<>(); trie.insert("/", "walla"); assertThat(trie.retrieve(""), equalTo("walla")); } - @Test public void testDifferentNamesOnDifferentPath() { PathTrie trie = new PathTrie<>(); trie.insert("/a/{type}", "test1"); @@ -83,7 +81,6 @@ public class PathTrieTests extends ESTestCase { assertThat(params.get("name"), equalTo("testX")); } - @Test public void testSameNameOnDifferentPath() { PathTrie trie = new PathTrie<>(); trie.insert("/a/c/{name}", "test1"); @@ -98,7 +95,6 @@ public class PathTrieTests extends ESTestCase { assertThat(params.get("name"), equalTo("testX")); } - @Test public void testPreferNonWildcardExecution() { PathTrie trie = new 
PathTrie<>(); trie.insert("{test}", "test1"); @@ -115,7 +111,6 @@ public class PathTrieTests extends ESTestCase { assertThat(trie.retrieve("/v/x/c", params), equalTo("test6")); } - @Test public void testSamePathConcreteResolution() { PathTrie trie = new PathTrie<>(); trie.insert("{x}/{y}/{z}", "test1"); @@ -132,7 +127,6 @@ public class PathTrieTests extends ESTestCase { assertThat(params.get("k"), equalTo("c")); } - @Test public void testNamedWildcardAndLookupWithWildcard() { PathTrie trie = new PathTrie<>(); trie.insert("x/{test}", "test1"); @@ -161,4 +155,25 @@ public class PathTrieTests extends ESTestCase { assertThat(trie.retrieve("a/*/_endpoint", params), equalTo("test5")); assertThat(params.get("test"), equalTo("*")); } + + public void testSplitPath() { + PathTrie trie = new PathTrie<>(); + assertThat(trie.splitPath("/a/"), arrayContaining("a")); + assertThat(trie.splitPath("/a/b"),arrayContaining("a", "b")); + assertThat(trie.splitPath("/a/b/c"), arrayContaining("a", "b", "c")); + assertThat(trie.splitPath("/a/b/"), arrayContaining("a", "b", "")); + assertThat(trie.splitPath("/a/b//d"), arrayContaining("a", "b", "", "d")); + + assertThat(trie.splitPath("//_search"), arrayContaining("", "_search")); + assertThat(trie.splitPath("//_search"), arrayContaining("", "_search")); + assertThat(trie.splitPath("//_search"), arrayContaining("", "_search")); + assertThat(trie.splitPath("//_search"), arrayContaining("", "_search")); + assertThat(trie.splitPath("//log/_search"), arrayContaining("", "log", "_search")); + + assertThat(trie.splitPath("/,/_search"), arrayContaining(",", "_search")); + assertThat(trie.splitPath("/,/_search"), arrayContaining(",", "_search")); + assertThat(trie.splitPath("/,/_search"), arrayContaining(",", "_search")); + assertThat(trie.splitPath("/,/_search"), arrayContaining(",", "_search")); + } + } diff --git a/core/src/test/java/org/elasticsearch/common/unit/FuzzinessTests.java 
b/core/src/test/java/org/elasticsearch/common/unit/FuzzinessTests.java index 234e341f4c9..807b4a72bf0 100644 --- a/core/src/test/java/org/elasticsearch/common/unit/FuzzinessTests.java +++ b/core/src/test/java/org/elasticsearch/common/unit/FuzzinessTests.java @@ -18,6 +18,8 @@ */ package org.elasticsearch.common.unit; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.xcontent.XContent; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; @@ -162,4 +164,29 @@ public class FuzzinessTests extends ESTestCase { } } + @Test + public void testSerialization() throws IOException { + Fuzziness fuzziness = Fuzziness.AUTO; + Fuzziness deserializedFuzziness = doSerializeRoundtrip(fuzziness); + assertEquals(fuzziness, deserializedFuzziness); + + fuzziness = Fuzziness.fromEdits(randomIntBetween(0, 2)); + deserializedFuzziness = doSerializeRoundtrip(fuzziness); + assertEquals(fuzziness, deserializedFuzziness); + } + + @Test + public void testSerializationAuto() throws IOException { + Fuzziness fuzziness = Fuzziness.AUTO; + Fuzziness deserializedFuzziness = doSerializeRoundtrip(fuzziness); + assertEquals(fuzziness, deserializedFuzziness); + assertEquals(fuzziness.asInt(), deserializedFuzziness.asInt()); + } + + private static Fuzziness doSerializeRoundtrip(Fuzziness in) throws IOException { + BytesStreamOutput output = new BytesStreamOutput(); + in.writeTo(output); + StreamInput streamInput = StreamInput.wrap(output.bytes()); + return Fuzziness.readFuzzinessFrom(streamInput); + } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalValuesMapperIntegrationIT.java b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalValuesMapperIntegrationIT.java index 42a9df6632e..6d28f2daaff 100644 --- 
a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalValuesMapperIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalValuesMapperIntegrationIT.java @@ -22,14 +22,12 @@ package org.elasticsearch.index.mapper.externalvalues; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.geo.ShapeRelation; import org.elasticsearch.common.geo.builders.ShapeBuilder; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; import org.junit.Test; -import java.util.Arrays; import java.util.Collection; import static org.hamcrest.Matchers.equalTo; @@ -68,13 +66,13 @@ public class ExternalValuesMapperIntegrationIT extends ESIntegTestCase { assertThat(response.getHits().totalHits(), equalTo((long) 1)); response = client().prepareSearch("test-idx") - .setPostFilter(QueryBuilders.geoDistanceRangeQuery("field.point").point(42.0, 51.0).to("1km")) + .setPostFilter(QueryBuilders.geoDistanceRangeQuery("field.point", 42.0, 51.0).to("1km")) .execute().actionGet(); assertThat(response.getHits().totalHits(), equalTo((long) 1)); response = client().prepareSearch("test-idx") - .setPostFilter(QueryBuilders.geoShapeQuery("field.shape", ShapeBuilder.newPoint(-100, 45), ShapeRelation.WITHIN)) + .setPostFilter(QueryBuilders.geoShapeQuery("field.shape", ShapeBuilder.newPoint(-100, 45)).relation(ShapeRelation.WITHIN)) .execute().actionGet(); assertThat(response.getHits().totalHits(), equalTo((long) 1)); diff --git a/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java b/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java new file mode 100644 index 00000000000..da32e38023c --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java @@ 
-0,0 +1,706 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.query; + +import com.carrotsearch.randomizedtesting.generators.CodepointSetGenerator; +import org.apache.lucene.search.Query; +import org.elasticsearch.Version; +import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; +import org.elasticsearch.action.get.GetRequest; +import org.elasticsearch.action.get.GetResponse; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.action.termvectors.MultiTermVectorsRequest; +import org.elasticsearch.action.termvectors.MultiTermVectorsResponse; +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.ClusterService; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.common.ParseFieldMatcher; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.inject.AbstractModule; +import org.elasticsearch.common.inject.Injector; +import 
org.elasticsearch.common.inject.ModulesBuilder; +import org.elasticsearch.common.inject.multibindings.Multibinder; +import org.elasticsearch.common.inject.util.Providers; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsModule; +import org.elasticsearch.common.unit.Fuzziness; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.env.Environment; +import org.elasticsearch.env.EnvironmentModule; +import org.elasticsearch.index.Index; +import org.elasticsearch.index.IndexNameModule; +import org.elasticsearch.index.analysis.AnalysisModule; +import org.elasticsearch.index.cache.IndexCacheModule; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.query.functionscore.ScoreFunctionParser; +import org.elasticsearch.index.query.functionscore.ScoreFunctionParserMapper; +import org.elasticsearch.index.query.support.QueryParsers; +import org.elasticsearch.index.settings.IndexSettingsModule; +import org.elasticsearch.index.similarity.SimilarityModule; +import org.elasticsearch.indices.IndicesModule; +import org.elasticsearch.indices.analysis.IndicesAnalysisService; +import org.elasticsearch.indices.breaker.CircuitBreakerService; +import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; +import org.elasticsearch.script.*; +import org.elasticsearch.script.mustache.MustacheScriptEngineService; +import org.elasticsearch.search.internal.SearchContext; +import org.elasticsearch.test.ESTestCase; +import 
org.elasticsearch.test.TestSearchContext; +import org.elasticsearch.test.VersionUtils; +import org.elasticsearch.test.cluster.TestClusterService; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.threadpool.ThreadPoolModule; +import org.joda.time.DateTime; +import org.joda.time.DateTimeZone; +import org.junit.*; + +import java.io.IOException; +import java.lang.reflect.InvocationHandler; +import java.lang.reflect.Method; +import java.lang.reflect.Proxy; +import java.util.*; +import java.util.concurrent.ExecutionException; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.not; + +public abstract class AbstractQueryTestCase> extends ESTestCase { + + private static final GeohashGenerator geohashGenerator = new GeohashGenerator(); + protected static final String STRING_FIELD_NAME = "mapped_string"; + protected static final String STRING_FIELD_NAME_2 = "mapped_string_2"; + protected static final String INT_FIELD_NAME = "mapped_int"; + protected static final String DOUBLE_FIELD_NAME = "mapped_double"; + protected static final String BOOLEAN_FIELD_NAME = "mapped_boolean"; + protected static final String DATE_FIELD_NAME = "mapped_date"; + protected static final String OBJECT_FIELD_NAME = "mapped_object"; + protected static final String GEO_POINT_FIELD_NAME = "mapped_geo_point"; + protected static final String GEO_SHAPE_FIELD_NAME = "mapped_geo_shape"; + protected static final String[] MAPPED_FIELD_NAMES = new String[] { STRING_FIELD_NAME, INT_FIELD_NAME, DOUBLE_FIELD_NAME, + BOOLEAN_FIELD_NAME, DATE_FIELD_NAME, OBJECT_FIELD_NAME, GEO_POINT_FIELD_NAME, GEO_SHAPE_FIELD_NAME }; + protected static final String[] MAPPED_LEAF_FIELD_NAMES = new String[] { STRING_FIELD_NAME, INT_FIELD_NAME, DOUBLE_FIELD_NAME, + BOOLEAN_FIELD_NAME, DATE_FIELD_NAME, GEO_POINT_FIELD_NAME }; + + private static Injector injector; + private static IndexQueryParserService queryParserService; + + protected static IndexQueryParserService 
queryParserService() { + return queryParserService; + } + + private static Index index; + + protected static Index getIndex() { + return index; + } + + private static String[] currentTypes; + + protected static String[] getCurrentTypes() { + return currentTypes; + } + + private static NamedWriteableRegistry namedWriteableRegistry; + + private static String[] randomTypes; + private static ClientInvocationHandler clientInvocationHandler = new ClientInvocationHandler(); + + /** + * Setup for the whole base test class. + */ + @BeforeClass + public static void init() throws IOException { + // we have to prefer CURRENT since with the range of versions we support it's rather unlikely to get the current actually. + Version version = randomBoolean() ? Version.CURRENT : VersionUtils.randomVersionBetween(random(), Version.V_2_0_0_beta1, Version.CURRENT); + Settings settings = Settings.settingsBuilder() + .put("name", AbstractQueryTestCase.class.toString()) + .put("path.home", createTempDir()) + .build(); + Settings indexSettings = Settings.settingsBuilder() + .put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); + index = new Index(randomAsciiOfLengthBetween(1, 10)); + final TestClusterService clusterService = new TestClusterService(); + clusterService.setState(new ClusterState.Builder(clusterService.state()).metaData(new MetaData.Builder().put( + new IndexMetaData.Builder(index.name()).settings(indexSettings).numberOfShards(1).numberOfReplicas(0)))); + final Client proxy = (Client) Proxy.newProxyInstance( + Client.class.getClassLoader(), + new Class[]{Client.class}, + clientInvocationHandler); + injector = new ModulesBuilder().add( + new EnvironmentModule(new Environment(settings)), + new SettingsModule(settings), + new ThreadPoolModule(new ThreadPool(settings)), + new IndicesModule(settings) { + @Override + public void configure() { + // skip services + bindQueryParsersExtension(); + } + }, + new ScriptModule(settings) { + @Override + protected void configure() { + 
Settings settings = Settings.builder() + .put("path.home", createTempDir()) + // no file watching, so we don't need a ResourceWatcherService + .put(ScriptService.SCRIPT_AUTO_RELOAD_ENABLED_SETTING, false) + .build(); + MockScriptEngine mockScriptEngine = new MockScriptEngine(); + Multibinder multibinder = Multibinder.newSetBinder(binder(), ScriptEngineService.class); + multibinder.addBinding().toInstance(mockScriptEngine); + try { + Class.forName("com.github.mustachejava.Mustache"); + } catch(ClassNotFoundException e) { + throw new IllegalStateException("error while loading mustache", e); + } + MustacheScriptEngineService mustacheScriptEngineService = new MustacheScriptEngineService(settings); + Set engines = new HashSet<>(); + engines.add(mockScriptEngine); + engines.add(mustacheScriptEngineService); + List customContexts = new ArrayList<>(); + bind(ScriptContextRegistry.class).toInstance(new ScriptContextRegistry(customContexts)); + try { + ScriptService scriptService = new ScriptService(settings, new Environment(settings), engines, null, new ScriptContextRegistry(customContexts)); + bind(ScriptService.class).toInstance(scriptService); + } catch(IOException e) { + throw new IllegalStateException("error while binding ScriptService", e); + } + + + } + }, + new IndexSettingsModule(index, indexSettings), + new IndexCacheModule(indexSettings), + new AnalysisModule(indexSettings, new IndicesAnalysisService(indexSettings)), + new SimilarityModule(indexSettings), + new IndexNameModule(index), + new AbstractModule() { + @Override + protected void configure() { + bind(Client.class).toInstance(proxy); + Multibinder.newSetBinder(binder(), ScoreFunctionParser.class); + bind(ScoreFunctionParserMapper.class).asEagerSingleton(); + bind(ClusterService.class).toProvider(Providers.of(clusterService)); + bind(CircuitBreakerService.class).to(NoneCircuitBreakerService.class); + bind(NamedWriteableRegistry.class).asEagerSingleton(); + } + } + ).createInjector(); + queryParserService = 
injector.getInstance(IndexQueryParserService.class); + + MapperService mapperService = queryParserService.mapperService; + //create some random type with some default field, those types will stick around for all of the subclasses + currentTypes = new String[randomIntBetween(0, 5)]; + for (int i = 0; i < currentTypes.length; i++) { + String type = randomAsciiOfLengthBetween(1, 10); + mapperService.merge(type, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(type, + STRING_FIELD_NAME, "type=string", + STRING_FIELD_NAME_2, "type=string", + INT_FIELD_NAME, "type=integer", + DOUBLE_FIELD_NAME, "type=double", + BOOLEAN_FIELD_NAME, "type=boolean", + DATE_FIELD_NAME, "type=date", + OBJECT_FIELD_NAME, "type=object", + GEO_POINT_FIELD_NAME, "type=geo_point,lat_lon=true,geohash=true,geohash_prefix=true", + GEO_SHAPE_FIELD_NAME, "type=geo_shape" + ).string()), false, false); + // also add mappings for two inner field in the object field + mapperService.merge(type, new CompressedXContent("{\"properties\":{\""+OBJECT_FIELD_NAME+"\":{\"type\":\"object\"," + + "\"properties\":{\""+DATE_FIELD_NAME+"\":{\"type\":\"date\"},\""+INT_FIELD_NAME+"\":{\"type\":\"integer\"}}}}}"), false, false); + currentTypes[i] = type; + } + namedWriteableRegistry = injector.getInstance(NamedWriteableRegistry.class); + } + + @AfterClass + public static void afterClass() throws Exception { + terminate(injector.getInstance(ThreadPool.class)); + injector = null; + index = null; + queryParserService = null; + currentTypes = null; + namedWriteableRegistry = null; + randomTypes = null; + } + + @Before + public void beforeTest() { + clientInvocationHandler.delegate = this; + //set some random types to be queried as part the search request, before each test + randomTypes = getRandomTypes(); + } + + protected void setSearchContext(String[] types) { + TestSearchContext testSearchContext = new TestSearchContext(); + testSearchContext.setTypes(types); + SearchContext.setCurrent(testSearchContext); + } 

    /**
     * Per-test cleanup: detaches this test instance from the mock client proxy and
     * clears the thread-local types / search context installed by {@code beforeTest}
     * and {@code setSearchContext} so state does not leak between tests.
     */
    @After
    public void afterTest() {
        clientInvocationHandler.delegate = null;
        QueryShardContext.removeTypes();
        SearchContext.removeCurrent();
    }

    /**
     * Builds the query under test via {@link #doCreateTestQueryBuilder()} and randomly
     * decorates it with a boost and a query name when the query supports them.
     */
    protected final QB createTestQueryBuilder() {
        QB query = doCreateTestQueryBuilder();
        //we should not set boost and query name for queries that don't parse it
        if (supportsBoostAndQueryName()) {
            if (randomBoolean()) {
                query.boost(2.0f / randomIntBetween(1, 20));
            }
            if (randomBoolean()) {
                query.queryName(randomAsciiOfLengthBetween(1, 10));
            }
        }
        return query;
    }

    /**
     * Create the query that is being tested
     */
    protected abstract QB doCreateTestQueryBuilder();

    /**
     * Generic test that creates new query from the test query and checks both for equality
     * and asserts equality on the two queries.
     */
    @Test
    public void testFromXContent() throws IOException {
        QB testQuery = createTestQueryBuilder();
        // round-trip: the builder's own toString() output must parse back to an equal builder
        assertParsedQuery(testQuery.toString(), testQuery);
        // alternate syntaxes must parse to the builder the subclass says they represent
        for (Map.Entry alternateVersion : getAlternateVersions().entrySet()) {
            assertParsedQuery(alternateVersion.getKey(), alternateVersion.getValue());
        }
    }

    /**
     * Returns alternate string representation of the query that need to be tested as they are never used as output
     * of {@link QueryBuilder#toXContent(XContentBuilder, ToXContent.Params)}. By default there are no alternate versions.
     */
    protected Map getAlternateVersions() {
        return Collections.emptyMap();
    }

    /**
     * Parses the query provided as string argument and compares it with the expected result provided as argument as a {@link QueryBuilder}
     */
    protected final void assertParsedQuery(String queryAsString, QueryBuilder expectedQuery) throws IOException {
        assertParsedQuery(queryAsString, expectedQuery, ParseFieldMatcher.STRICT);
    }

    /**
     * Same as {@link #assertParsedQuery(String, QueryBuilder)} but with an explicit
     * {@link ParseFieldMatcher}; also checks hashCode consistency and that parsing
     * produced a distinct instance.
     */
    protected final void assertParsedQuery(String queryAsString, QueryBuilder expectedQuery, ParseFieldMatcher matcher) throws IOException {
        QueryBuilder newQuery = parseQuery(queryAsString, matcher);
        assertNotSame(newQuery, expectedQuery);
        assertEquals(expectedQuery, newQuery);
        assertEquals(expectedQuery.hashCode(), newQuery.hashCode());
    }

    /** Parses the given query string with strict field matching. */
    protected final QueryBuilder parseQuery(String queryAsString) throws IOException {
        return parseQuery(queryAsString, ParseFieldMatcher.STRICT);
    }

    protected final QueryBuilder parseQuery(String queryAsString, ParseFieldMatcher matcher) throws IOException {
        XContentParser parser = XContentFactory.xContent(queryAsString).createParser(queryAsString);
        return parseQuery(parser, matcher);
    }

    /** Parses the given query bytes with strict field matching. */
    protected final QueryBuilder parseQuery(BytesReference query) throws IOException {
        XContentParser parser = XContentFactory.xContent(query).createParser(query);
        return parseQuery(parser, ParseFieldMatcher.STRICT);
    }

    protected final QueryBuilder parseQuery(XContentParser parser, ParseFieldMatcher matcher) throws IOException {
        QueryParseContext context = createParseContext();
        context.reset(parser);
        context.parseFieldMatcher(matcher);
        return context.parseInnerQueryBuilder();
    }

    /**
     * Test creates the {@link Query} from the {@link QueryBuilder} under test and delegates the
     * assertions being made on the result to the implementing subclass.
     */
    @Test
    public void testToQuery() throws IOException {
        QueryShardContext context = createShardContext();
        context.setAllowUnmappedFields(true);

        QB firstQuery = createTestQueryBuilder();
        setSearchContext(randomTypes); // only set search context for toQuery to be more realistic
        Query firstLuceneQuery = firstQuery.toQuery(context);
        assertLuceneQuery(firstQuery, firstLuceneQuery, context);
        SearchContext.removeCurrent(); // remove after assertLuceneQuery since the assertLuceneQuery impl might access the context as well


        QB secondQuery = copyQuery(firstQuery);
        //query _name never should affect the result of toQuery, we randomly set it to make sure
        if (randomBoolean()) {
            secondQuery.queryName(secondQuery.queryName() == null ? randomAsciiOfLengthBetween(1, 30) : secondQuery.queryName() + randomAsciiOfLengthBetween(1, 10));
        }
        setSearchContext(randomTypes); // only set search context for toQuery to be more realistic
        Query secondLuceneQuery = secondQuery.toQuery(context);
        assertLuceneQuery(secondQuery, secondLuceneQuery, context);
        SearchContext.removeCurrent(); // remove after assertLuceneQuery since the assertLuceneQuery impl might access the context as well

        assertThat("two equivalent query builders lead to different lucene queries", secondLuceneQuery, equalTo(firstLuceneQuery));

        //if the initial lucene query is null, changing its boost won't have any effect, we shouldn't test that
        if (firstLuceneQuery != null && supportsBoostAndQueryName()) {
            secondQuery.boost(firstQuery.boost() + 1f + randomFloat());
            setSearchContext(randomTypes); // only set search context for toQuery to be more realistic
            Query thirdLuceneQuery = secondQuery.toQuery(context);
            SearchContext.removeCurrent();
            assertThat("modifying the boost doesn't affect the corresponding lucene query", firstLuceneQuery, not(equalTo(thirdLuceneQuery)));
        }
    }

    /**
     * Few queries allow you to set the boost and queryName on the java api, although the corresponding parser doesn't parse them as they are not supported.
     * This method allows to disable boost and queryName related tests for those queries. Those queries are easy to identify: their parsers
     * don't parse `boost` and `_name` as they don't apply to the specific query: filter query, wrapper query and match_none
     */
    protected boolean supportsBoostAndQueryName() {
        return true;
    }

    /**
     * Checks the result of {@link QueryBuilder#toQuery(QueryShardContext)} given the original {@link QueryBuilder} and {@link QueryShardContext}.
     * Verifies that named queries and boost are properly handled and delegates to {@link #doAssertLuceneQuery(AbstractQueryBuilder, Query, QueryShardContext)}
     * for query specific checks.
     */
    protected final void assertLuceneQuery(QB queryBuilder, Query query, QueryShardContext context) throws IOException {
        if (queryBuilder.queryName() != null) {
            Query namedQuery = context.copyNamedQueries().get(queryBuilder.queryName());
            assertThat(namedQuery, equalTo(query));
        }
        if (query != null) {
            assertBoost(queryBuilder, query);
        }
        doAssertLuceneQuery(queryBuilder, query, context);
    }

    /**
     * Allows to override boost assertions for queries that don't have the default behaviour
     */
    protected void assertBoost(QB queryBuilder, Query query) throws IOException {
        assertThat(query.getBoost(), equalTo(queryBuilder.boost()));
    }

    /**
     * Checks the result of {@link QueryBuilder#toQuery(QueryShardContext)} given the original {@link QueryBuilder} and {@link QueryShardContext}.
     * Contains the query specific checks to be implemented by subclasses.
     */
    protected abstract void doAssertLuceneQuery(QB queryBuilder, Query query, QueryShardContext context) throws IOException;

    /**
     * Test serialization and deserialization of the test query.
     */
    @Test
    public void testSerialization() throws IOException {
        QB testQuery = createTestQueryBuilder();
        assertSerialization(testQuery);
    }

    /**
     * Serialize the given query builder and asserts that both are equal
     */
    @SuppressWarnings("unchecked")
    protected QB assertSerialization(QB testQuery) throws IOException {
        try (BytesStreamOutput output = new BytesStreamOutput()) {
            testQuery.writeTo(output);
            try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), namedWriteableRegistry)) {
                // deserialize through the registered prototype for this query's name
                QueryBuilder prototype = queryParser(testQuery.getName()).getBuilderPrototype();
                QueryBuilder deserializedQuery = prototype.readFrom(in);
                assertEquals(deserializedQuery, testQuery);
                assertEquals(deserializedQuery.hashCode(), testQuery.hashCode());
                assertNotSame(deserializedQuery, testQuery);
                return (QB) deserializedQuery;
            }
        }
    }

    /**
     * Checks the equals/hashCode contract (reflexive, symmetric, transitive, hash consistency)
     * on the query under test and on streaming copies of it, then verifies that changing
     * query name or boost breaks equality.
     */
    @Test
    public void testEqualsAndHashcode() throws IOException {
        QB firstQuery = createTestQueryBuilder();
        assertFalse("query is equal to null", firstQuery.equals(null));
        assertFalse("query is equal to incompatible type", firstQuery.equals(""));
        assertTrue("query is not equal to self", firstQuery.equals(firstQuery));
        assertThat("same query's hashcode returns different values if called multiple times", firstQuery.hashCode(), equalTo(firstQuery.hashCode()));

        QB secondQuery = copyQuery(firstQuery);
        assertTrue("query is not equal to self", secondQuery.equals(secondQuery));
        assertTrue("query is not equal to its copy", firstQuery.equals(secondQuery));
        assertTrue("equals is not symmetric", secondQuery.equals(firstQuery));
        assertThat("query copy's hashcode is different from original hashcode", secondQuery.hashCode(), equalTo(firstQuery.hashCode()));

        QB thirdQuery = copyQuery(secondQuery);
        assertTrue("query is not equal to self", thirdQuery.equals(thirdQuery));
        assertTrue("query is not equal to its copy", secondQuery.equals(thirdQuery));
        assertThat("query copy's hashcode is different from original hashcode", secondQuery.hashCode(), equalTo(thirdQuery.hashCode()));
        assertTrue("equals is not transitive", firstQuery.equals(thirdQuery));
        assertThat("query copy's hashcode is different from original hashcode", firstQuery.hashCode(), equalTo(thirdQuery.hashCode()));
        assertTrue("equals is not symmetric", thirdQuery.equals(secondQuery));
        assertTrue("equals is not symmetric", thirdQuery.equals(firstQuery));

        // mutate either the name or the boost — the copy must no longer equal the original
        if (randomBoolean()) {
            secondQuery.queryName(secondQuery.queryName() == null ? randomAsciiOfLengthBetween(1, 30) : secondQuery.queryName() + randomAsciiOfLengthBetween(1, 10));
        } else {
            secondQuery.boost(firstQuery.boost() + 1f + randomFloat());
        }
        assertThat("different queries should not be equal", secondQuery, not(equalTo(firstQuery)));
        assertThat("different queries should have different hashcode", secondQuery.hashCode(), not(equalTo(firstQuery.hashCode())));
    }

    /** Looks up the registered {@link QueryParser} for the given query name. */
    private QueryParser queryParser(String queryId) {
        return queryParserService.indicesQueriesRegistry().queryParsers().get(queryId);
    }

    //we use the streaming infra to create a copy of the query provided as argument
    protected QB copyQuery(QB query) throws IOException {
        try (BytesStreamOutput output = new BytesStreamOutput()) {
            query.writeTo(output);
            try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), namedWriteableRegistry)) {
                QueryBuilder prototype = queryParser(query.getName()).getBuilderPrototype();
                @SuppressWarnings("unchecked")
                QB secondQuery = (QB)prototype.readFrom(in);
                return secondQuery;
            }
        }
    }

    /**
     * @return a new {@link QueryShardContext} based on the base test index and queryParserService
     */
    protected static QueryShardContext createShardContext() {
        QueryShardContext queryCreationContext = new QueryShardContext(index, queryParserService);
        queryCreationContext.reset();
        queryCreationContext.parseFieldMatcher(ParseFieldMatcher.STRICT);
        return queryCreationContext;
    }

    /**
     * @return a new {@link QueryParseContext} based on the base test index and queryParserService
     */
    protected static QueryParseContext createParseContext() {
        QueryParseContext queryParseContext = new QueryParseContext(queryParserService.indicesQueriesRegistry());
        queryParseContext.reset(null);
        queryParseContext.parseFieldMatcher(ParseFieldMatcher.STRICT);
        return queryParseContext;
    }

    /**
     * create a random value for either {@link AbstractQueryTestCase#BOOLEAN_FIELD_NAME}, {@link AbstractQueryTestCase#INT_FIELD_NAME},
     * {@link AbstractQueryTestCase#DOUBLE_FIELD_NAME}, {@link AbstractQueryTestCase#STRING_FIELD_NAME} or
     * {@link AbstractQueryTestCase#DATE_FIELD_NAME}, or a String value by default
     */
    protected static Object getRandomValueForFieldName(String fieldName) {
        Object value;
        switch (fieldName) {
            case STRING_FIELD_NAME:
                value = rarely() ? randomUnicodeOfLength(10) : randomAsciiOfLengthBetween(1, 10); // unicode in 10% cases
                break;
            case INT_FIELD_NAME:
                value = randomIntBetween(0, 10);
                break;
            case DOUBLE_FIELD_NAME:
                value = randomDouble() * 10;
                break;
            case BOOLEAN_FIELD_NAME:
                value = randomBoolean();
                break;
            case DATE_FIELD_NAME:
                value = new DateTime(System.currentTimeMillis(), DateTimeZone.UTC).toString();
                break;
            default:
                value = randomAsciiOfLengthBetween(1, 10);
        }
        return value;
    }

    /** @return a whitespace-separated string of 0-3 random ascii terms, possibly empty */
    protected static String getRandomQueryText() {
        int terms = randomIntBetween(0, 3);
        StringBuilder builder = new StringBuilder();
        for (int i = 0; i < terms; i++) {
            builder.append(randomAsciiOfLengthBetween(1, 10) + " ");
        }
        return builder.toString().trim();
    }

    /**
     * Helper method to return a mapped or a random field
     */
    protected String getRandomFieldName() {
        // if no type is set then return a random field name
        if (currentTypes == null || currentTypes.length == 0 || randomBoolean()) {
            return randomAsciiOfLengthBetween(1, 10);
        }
        return
 randomFrom(MAPPED_LEAF_FIELD_NAMES);
    }

    /**
     * Helper method to return a random field (mapped or unmapped) and a value
     */
    protected Tuple getRandomFieldNameAndValue() {
        String fieldName = getRandomFieldName();
        return new Tuple<>(fieldName, getRandomValueForFieldName(fieldName));
    }

    /**
     * Helper method to return a random rewrite method
     */
    protected static String getRandomRewriteMethod() {
        String rewrite;
        if (randomBoolean()) {
            rewrite = randomFrom(QueryParsers.CONSTANT_SCORE,
                    QueryParsers.SCORING_BOOLEAN,
                    QueryParsers.CONSTANT_SCORE_BOOLEAN).getPreferredName();
        } else {
            // top_terms* rewrites take a numeric size suffix
            rewrite = randomFrom(QueryParsers.TOP_TERMS,
                    QueryParsers.TOP_TERMS_BOOST,
                    QueryParsers.TOP_TERMS_BLENDED_FREQS).getPreferredName() + "1";
        }
        return rewrite;
    }

    /**
     * @return a random selection of the mapped types, or "_all"/no types when none are mapped
     */
    protected String[] getRandomTypes() {
        String[] types;
        if (currentTypes.length > 0 && randomBoolean()) {
            int numberOfQueryTypes = randomIntBetween(1, currentTypes.length);
            types = new String[numberOfQueryTypes];
            for (int i = 0; i < numberOfQueryTypes; i++) {
                types[i] = randomFrom(currentTypes);
            }
        } else {
            if (randomBoolean()) {
                types = new String[] { MetaData.ALL };
            } else {
                types = new String[0];
            }
        }
        return types;
    }

    /** @return one of the mapped types, or "_all" when none are mapped */
    protected String getRandomType() {
        return (currentTypes.length == 0) ? MetaData.ALL : randomFrom(currentTypes);
    }

    public static String randomGeohash(int minPrecision, int maxPrecision) {
        return geohashGenerator.ofStringLength(getRandom(), minPrecision, maxPrecision);
    }

    /** Random string generator restricted to the 32-character geohash alphabet. */
    public static class GeohashGenerator extends CodepointSetGenerator {
        private final static char[] ASCII_SET = "0123456789bcdefghjkmnpqrstuvwxyz".toCharArray();

        public GeohashGenerator() {
            super(ASCII_SET);
        }
    }

    /**
     * @return a random {@link Fuzziness}; for numeric/date fields a field-appropriate
     * value is chosen, otherwise AUTO or a random edit distance
     */
    protected static Fuzziness randomFuzziness(String fieldName) {
        if (randomBoolean()) {
            return Fuzziness.fromEdits(randomIntBetween(0, 2));
        }
        if (randomBoolean()) {
            return Fuzziness.AUTO;
        }
        switch (fieldName) {
            case INT_FIELD_NAME:
                return Fuzziness.build(randomIntBetween(3, 100));
            case DOUBLE_FIELD_NAME:
                return Fuzziness.build(1 + randomFloat() * 10);
            case DATE_FIELD_NAME:
                return Fuzziness.build(randomTimeValue());
            default:
                return Fuzziness.AUTO;
        }
    }

    protected static boolean isNumericFieldName(String fieldName) {
        return INT_FIELD_NAME.equals(fieldName) || DOUBLE_FIELD_NAME.equals(fieldName);
    }

    protected static String randomAnalyzer() {
        return randomFrom("simple", "standard", "keyword", "whitespace");
    }

    protected static String randomMinimumShouldMatch() {
        return randomFrom("1", "-1", "75%", "-25%", "2<75%", "2<-25%");
    }

    protected static String randomTimeZone() {
        return randomFrom(TIMEZONE_IDS);
    }

    private static final List TIMEZONE_IDS = new ArrayList<>(DateTimeZone.getAvailableIDs());

    /**
     * Proxy handler backing the mock {@link Client}: routes get and multiTermVectors
     * calls to the current test instance ({@code delegate}) and rejects everything else.
     */
    private static class ClientInvocationHandler implements InvocationHandler {
        AbstractQueryTestCase delegate;
        @Override
        public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
            if (method.equals(Client.class.getDeclaredMethod("get", GetRequest.class))) {
                return new PlainActionFuture() {
                    @Override
                    public GetResponse get() throws InterruptedException, ExecutionException {
                        return delegate.executeGet((GetRequest) args[0]);
                    }
                };
            } else if (method.equals(Client.class.getDeclaredMethod("multiTermVectors", MultiTermVectorsRequest.class))) {
                return new PlainActionFuture() {
                    @Override
                    public MultiTermVectorsResponse get() throws InterruptedException, ExecutionException {
                        return delegate.executeMultiTermVectors((MultiTermVectorsRequest) args[0]);
                    }
                };
            } else if (method.equals(Object.class.getDeclaredMethod("toString"))) {
                return "MockClient";
            }
            throw new UnsupportedOperationException("this test can't handle calls to: " + method);
        }

    }

    /**
     * Override this to handle {@link Client#get(GetRequest)} calls from parsers / builders
     */
    protected GetResponse executeGet(GetRequest getRequest) {
        throw new UnsupportedOperationException("this test can't handle GET requests");
    }

    /**
     * Override this to handle {@link Client#multiTermVectors(MultiTermVectorsRequest)} calls from parsers / builders
     */
    protected MultiTermVectorsResponse executeMultiTermVectors(MultiTermVectorsRequest mtvRequest) {
        throw new UnsupportedOperationException("this test can't handle MultiTermVector requests");
    }

}
diff --git a/core/src/test/java/org/elasticsearch/index/query/AbstractTermQueryTestCase.java b/core/src/test/java/org/elasticsearch/index/query/AbstractTermQueryTestCase.java
new file mode 100644
index 00000000000..ccfe6190011
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/index/query/AbstractTermQueryTestCase.java
@@ -0,0 +1,111 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.query;

import org.junit.Test;

import java.util.HashMap;
import java.util.Map;

public abstract class AbstractTermQueryTestCase> extends AbstractQueryTestCase {

    /**
     * Builds a term-style query on a randomly picked value type (boolean, string,
     * integer or double); half the time the matching mapped field is used, otherwise
     * a random (unmapped) field name.
     */
    @Override
    protected final QB doCreateTestQueryBuilder() {
        String fieldName = null;
        Object value;
        switch (randomIntBetween(0, 3)) {
            case 0:
                // boolean value
                if (randomBoolean()) {
                    fieldName = BOOLEAN_FIELD_NAME;
                }
                value = randomBoolean();
                break;
            case 1:
                // string value
                if (randomBoolean()) {
                    fieldName = STRING_FIELD_NAME;
                }
                if (frequently()) {
                    value = randomAsciiOfLengthBetween(1, 10);
                } else {
                    // generate unicode string in 10% of cases
                    value = randomUnicodeOfLength(10);
                }
                break;
            case 2:
                // int value
                if (randomBoolean()) {
                    fieldName = INT_FIELD_NAME;
                }
                value = randomInt(10000);
                break;
            case 3:
                // double value
                if (randomBoolean()) {
                    fieldName = DOUBLE_FIELD_NAME;
                }
                value = randomDouble();
                break;
            default:
                throw new UnsupportedOperationException();
        }

        if (fieldName == null) {
            fieldName = randomAsciiOfLengthBetween(1, 10);
        }
        return createQueryBuilder(fieldName, value);
    }

    /** Creates the concrete term-style query builder under test for the given field and value. */
    protected abstract QB createQueryBuilder(String fieldName, Object value);

    /**
     * Verifies the builder rejects null/empty field names and null values
     * with {@link IllegalArgumentException}.
     */
    @Test
    public void testIllegalArguments() throws QueryShardException {
        try {
            if (randomBoolean()) {
                createQueryBuilder(null, randomAsciiOfLengthBetween(1, 30));
            } else {
                createQueryBuilder("", randomAsciiOfLengthBetween(1, 30));
            }
            fail("fieldname cannot be null or empty");
        } catch (IllegalArgumentException e) {
            // expected
        }

        try {
            createQueryBuilder("field", null);
            fail("value cannot be null or empty");
        } catch (IllegalArgumentException e) {
            // expected
        }
    }

    /**
     * Also test the short "field : value" syntax, which never appears as
     * toXContent output but must parse to the same builder.
     */
    @Override
    protected Map getAlternateVersions() {
        HashMap alternateVersions = new HashMap<>();
        QB tempQuery = createTestQueryBuilder();
        QB testQuery = createQueryBuilder(tempQuery.fieldName(), tempQuery.value());
        boolean isString = testQuery.value() instanceof String;
        // string values need to be quoted in the generated json
        String value = (isString ? "\"" : "") + testQuery.value() + (isString ? "\"" : "");
        String contentString = "{\n" +
                " \"" + testQuery.getName() + "\" : {\n" +
                " \"" + testQuery.fieldName() + "\" : " + value + "\n" +
                " }\n" +
                "}";
        alternateVersions.put(contentString, testQuery);
        return alternateVersions;
    }
}
diff --git a/core/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java
new file mode 100644
index 00000000000..b86122d07dc
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java
@@ -0,0 +1,205 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.query;

import org.apache.lucene.search.*;
import org.hamcrest.Matchers;
import org.junit.Test;

import java.io.IOException;
import java.util.*;

import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.query.QueryBuilders.boolQuery;
import static org.elasticsearch.index.query.QueryBuilders.constantScoreQuery;
import static org.elasticsearch.index.query.QueryBuilders.termQuery;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.instanceOf;

public class BoolQueryBuilderTests extends AbstractQueryTestCase {

    /**
     * Builds a bool query with 0-3 random clauses of each kind (must, must_not,
     * should, filter) and randomized adjust_pure_negative / disable_coord /
     * minimum_should_match settings.
     */
    @Override
    protected BoolQueryBuilder doCreateTestQueryBuilder() {
        BoolQueryBuilder query = new BoolQueryBuilder();
        if (randomBoolean()) {
            query.adjustPureNegative(randomBoolean());
        }
        if (randomBoolean()) {
            query.disableCoord(randomBoolean());
        }
        if (randomBoolean()) {
            query.minimumNumberShouldMatch(randomMinimumShouldMatch());
        }
        int mustClauses = randomIntBetween(0, 3);
        for (int i = 0; i < mustClauses; i++) {
            query.must(RandomQueryBuilder.createQuery(random()));
        }
        int mustNotClauses = randomIntBetween(0, 3);
        for (int i = 0; i < mustNotClauses; i++) {
            query.mustNot(RandomQueryBuilder.createQuery(random()));
        }
        int shouldClauses = randomIntBetween(0, 3);
        for (int i = 0; i < shouldClauses; i++) {
            query.should(RandomQueryBuilder.createQuery(random()));
        }
        int filterClauses = randomIntBetween(0, 3);
        for (int i = 0; i < filterClauses; i++) {
            query.filter(RandomQueryBuilder.createQuery(random()));
        }
        return query;
    }

    @Override
    protected void doAssertLuceneQuery(BoolQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
        if (!queryBuilder.hasClauses()) {
            // a bool query with no clauses rewrites to match_all
            assertThat(query, instanceOf(MatchAllDocsQuery.class));
        } else {
            List clauses = new ArrayList<>();
            clauses.addAll(getBooleanClauses(queryBuilder.must(), BooleanClause.Occur.MUST, context));
            clauses.addAll(getBooleanClauses(queryBuilder.mustNot(), BooleanClause.Occur.MUST_NOT, context));
            clauses.addAll(getBooleanClauses(queryBuilder.should(), BooleanClause.Occur.SHOULD, context));
            clauses.addAll(getBooleanClauses(queryBuilder.filter(), BooleanClause.Occur.FILTER, context));

            if (clauses.isEmpty()) {
                assertThat(query, instanceOf(MatchAllDocsQuery.class));
            } else {
                assertThat(query, instanceOf(BooleanQuery.class));
                BooleanQuery booleanQuery = (BooleanQuery) query;
                assertThat(booleanQuery.isCoordDisabled(), equalTo(queryBuilder.disableCoord()));
                if (queryBuilder.adjustPureNegative()) {
                    // a purely negative query gets a match_all MUST clause added
                    boolean isNegative = true;
                    for (BooleanClause clause : clauses) {
                        if (clause.isProhibited() == false) {
                            isNegative = false;
                            break;
                        }
                    }
                    if (isNegative) {
                        clauses.add(new BooleanClause(new MatchAllDocsQuery(), BooleanClause.Occur.MUST));
                    }
                }
                assertThat(booleanQuery.clauses().size(), equalTo(clauses.size()));
                Iterator clauseIterator = clauses.iterator();
                for (BooleanClause booleanClause : booleanQuery.getClauses()) {
                    assertThat(booleanClause, equalTo(clauseIterator.next()));
                }
            }
        }
    }

    /**
     * Converts the given query builders to lucene {@link BooleanClause}s with the given
     * occur; builders that produce a null lucene query are skipped.
     */
    private static List getBooleanClauses(List queryBuilders, BooleanClause.Occur occur, QueryShardContext context) throws IOException {
        List clauses = new ArrayList<>();
        for (QueryBuilder query : queryBuilders) {
            Query innerQuery = query.toQuery(context);
            if (innerQuery != null) {
                clauses.add(new BooleanClause(innerQuery, occur));
            }
        }
        return clauses;
    }

    /**
     * Also test the unquoted-key syntax (and the deprecated "mustNot" spelling),
     * built with at most one clause of each kind.
     */
    @Override
    protected Map getAlternateVersions() {
        Map alternateVersions = new HashMap<>();
        BoolQueryBuilder tempQueryBuilder = createTestQueryBuilder();
        BoolQueryBuilder expectedQuery = new BoolQueryBuilder();
        String contentString = "{\n" +
                " \"bool\" : {\n";
        if (tempQueryBuilder.must().size() > 0) {
            QueryBuilder must = tempQueryBuilder.must().get(0);
            contentString += "must: " + must.toString() + ",";
            expectedQuery.must(must);
        }
        if (tempQueryBuilder.mustNot().size() > 0) {
            QueryBuilder mustNot = tempQueryBuilder.mustNot().get(0);
            contentString += (randomBoolean() ? "must_not: " : "mustNot: ") + mustNot.toString() + ",";
            expectedQuery.mustNot(mustNot);
        }
        if (tempQueryBuilder.should().size() > 0) {
            QueryBuilder should = tempQueryBuilder.should().get(0);
            contentString += "should: " + should.toString() + ",";
            expectedQuery.should(should);
        }
        if (tempQueryBuilder.filter().size() > 0) {
            QueryBuilder filter = tempQueryBuilder.filter().get(0);
            contentString += "filter: " + filter.toString() + ",";
            expectedQuery.filter(filter);
        }
        // strip the trailing comma before closing the object
        contentString = contentString.substring(0, contentString.length() - 1);
        contentString += " } \n" + "}";
        alternateVersions.put(contentString, expectedQuery);
        return alternateVersions;
    }

    @Test
    public void testIllegalArguments() {
        BoolQueryBuilder booleanQuery = new BoolQueryBuilder();

        try {
            booleanQuery.must(null);
            fail("cannot be null");
        } catch (IllegalArgumentException e) {
        }

        try {
            booleanQuery.mustNot(null);
            fail("cannot be null");
        } catch (IllegalArgumentException e) {
        }

        try {
            booleanQuery.filter(null);
            fail("cannot be null");
        } catch (IllegalArgumentException e) {
        }

        try {
            booleanQuery.should(null);
            fail("cannot be null");
        } catch (IllegalArgumentException e) {
        }
    }

    // https://github.com/elasticsearch/elasticsearch/issues/7240
    @Test
    public void testEmptyBooleanQuery() throws Exception {
        String query = jsonBuilder().startObject().startObject("bool").endObject().endObject().string();
        Query parsedQuery = parseQuery(query).toQuery(createShardContext());
        assertThat(parsedQuery, Matchers.instanceOf(MatchAllDocsQuery.class));
    }

    public void testDefaultMinShouldMatch() throws Exception {
        // Queries have a minShouldMatch of 0
        BooleanQuery bq = (BooleanQuery) parseQuery(boolQuery().must(termQuery("foo", "bar")).buildAsBytes()).toQuery(createShardContext());
        assertEquals(0, bq.getMinimumNumberShouldMatch());

        bq = (BooleanQuery) parseQuery(boolQuery().should(termQuery("foo", "bar")).buildAsBytes()).toQuery(createShardContext());
        assertEquals(0, bq.getMinimumNumberShouldMatch());

        // Filters have a minShouldMatch of 0/1
        ConstantScoreQuery csq = (ConstantScoreQuery) parseQuery(constantScoreQuery(boolQuery().must(termQuery("foo", "bar"))).buildAsBytes()).toQuery(createShardContext());
        bq = (BooleanQuery) csq.getQuery();
        assertEquals(0, bq.getMinimumNumberShouldMatch());

        csq = (ConstantScoreQuery) parseQuery(constantScoreQuery(boolQuery().should(termQuery("foo", "bar"))).buildAsBytes()).toQuery(createShardContext());
        bq = (BooleanQuery) csq.getQuery();
        assertEquals(1, bq.getMinimumNumberShouldMatch());
    }
}
diff --git a/core/src/test/java/org/elasticsearch/index/query/BoostingQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/BoostingQueryBuilderTests.java
new file mode 100644
index 00000000000..57fab99d51f
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/index/query/BoostingQueryBuilderTests.java
@@ -0,0 +1,74 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.query;

import org.apache.lucene.queries.BoostingQuery;
import org.apache.lucene.search.Query;
import org.junit.Test;

import java.io.IOException;

import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.CoreMatchers.nullValue;

public class BoostingQueryBuilderTests extends AbstractQueryTestCase {

    /** Builds a boosting query with two random inner queries and a random negative boost. */
    @Override
    protected BoostingQueryBuilder doCreateTestQueryBuilder() {
        BoostingQueryBuilder query = new BoostingQueryBuilder(RandomQueryBuilder.createQuery(random()), RandomQueryBuilder.createQuery(random()));
        query.negativeBoost(2.0f / randomIntBetween(1, 20));
        return query;
    }

    @Override
    protected void doAssertLuceneQuery(BoostingQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
        Query positive = queryBuilder.positiveQuery().toQuery(context);
        Query negative = queryBuilder.negativeQuery().toQuery(context);
        if (positive == null || negative == null) {
            // if either inner query rewrites to null, the whole boosting query is null
            assertThat(query, nullValue());
        } else {
            assertThat(query, instanceOf(BoostingQuery.class));
        }
    }

    /**
     * Verifies null inner queries and a negative negativeBoost are rejected
     * with {@link IllegalArgumentException}.
     */
    @Test
    public void testIllegalArguments() {
        try {
            new BoostingQueryBuilder(null, new MatchAllQueryBuilder());
            fail("must not be null");
        } catch (IllegalArgumentException e) {
            //
        }

        try {
            new BoostingQueryBuilder(new MatchAllQueryBuilder(), null);
            fail("must not be null");
        } catch (IllegalArgumentException e) {
            //
        }

        try {
            new BoostingQueryBuilder(new MatchAllQueryBuilder(), new MatchAllQueryBuilder()).negativeBoost(-1.0f);
            fail("must not be negative");
        } catch (IllegalArgumentException e) {
            //
        }
    }
}
diff --git a/core/src/test/java/org/elasticsearch/index/query/CombineFunctionTests.java b/core/src/test/java/org/elasticsearch/index/query/CombineFunctionTests.java
new file mode 100644
index 00000000000..94d06097b09
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/index/query/CombineFunctionTests.java
@@ -0,0 +1,130 @@
/*
 * Licensed to
Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.query; + +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.lucene.search.function.CombineFunction; +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.equalTo; + +public class CombineFunctionTests extends ESTestCase { + + public void testValidOrdinals() { + assertThat(CombineFunction.MULTIPLY.ordinal(), equalTo(0)); + assertThat(CombineFunction.REPLACE.ordinal(), equalTo(1)); + assertThat(CombineFunction.SUM.ordinal(), equalTo(2)); + assertThat(CombineFunction.AVG.ordinal(), equalTo(3)); + assertThat(CombineFunction.MIN.ordinal(), equalTo(4)); + assertThat(CombineFunction.MAX.ordinal(), equalTo(5)); + } + + public void testWriteTo() throws Exception { + try (BytesStreamOutput out = new BytesStreamOutput()) { + CombineFunction.MULTIPLY.writeTo(out); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + assertThat(in.readVInt(), equalTo(0)); + } + } + + try (BytesStreamOutput out = new BytesStreamOutput()) { + CombineFunction.REPLACE.writeTo(out); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + assertThat(in.readVInt(), 
equalTo(1)); + } + } + + try (BytesStreamOutput out = new BytesStreamOutput()) { + CombineFunction.SUM.writeTo(out); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + assertThat(in.readVInt(), equalTo(2)); + } + } + + try (BytesStreamOutput out = new BytesStreamOutput()) { + CombineFunction.AVG.writeTo(out); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + assertThat(in.readVInt(), equalTo(3)); + } + } + try (BytesStreamOutput out = new BytesStreamOutput()) { + CombineFunction.MIN.writeTo(out); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + assertThat(in.readVInt(), equalTo(4)); + } + } + + try (BytesStreamOutput out = new BytesStreamOutput()) { + CombineFunction.MAX.writeTo(out); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + assertThat(in.readVInt(), equalTo(5)); + } + } + } + + public void testReadFrom() throws Exception { + try (BytesStreamOutput out = new BytesStreamOutput()) { + out.writeVInt(0); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + assertThat(CombineFunction.readCombineFunctionFrom(in), equalTo(CombineFunction.MULTIPLY)); + } + } + try (BytesStreamOutput out = new BytesStreamOutput()) { + out.writeVInt(1); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + assertThat(CombineFunction.readCombineFunctionFrom(in), equalTo(CombineFunction.REPLACE)); + } + } + try (BytesStreamOutput out = new BytesStreamOutput()) { + out.writeVInt(2); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + assertThat(CombineFunction.readCombineFunctionFrom(in), equalTo(CombineFunction.SUM)); + } + } + try (BytesStreamOutput out = new BytesStreamOutput()) { + out.writeVInt(3); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + assertThat(CombineFunction.readCombineFunctionFrom(in), equalTo(CombineFunction.AVG)); + } + } + try (BytesStreamOutput out = new BytesStreamOutput()) { + out.writeVInt(4); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + 
assertThat(CombineFunction.readCombineFunctionFrom(in), equalTo(CombineFunction.MIN)); + } + } + try (BytesStreamOutput out = new BytesStreamOutput()) { + out.writeVInt(5); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + assertThat(CombineFunction.readCombineFunctionFrom(in), equalTo(CombineFunction.MAX)); + } + } + } + + public void testFromString() { + assertThat(CombineFunction.fromString("multiply"), equalTo(CombineFunction.MULTIPLY)); + assertThat(CombineFunction.fromString("replace"), equalTo(CombineFunction.REPLACE)); + assertThat(CombineFunction.fromString("sum"), equalTo(CombineFunction.SUM)); + assertThat(CombineFunction.fromString("avg"), equalTo(CombineFunction.AVG)); + assertThat(CombineFunction.fromString("min"), equalTo(CombineFunction.MIN)); + assertThat(CombineFunction.fromString("max"), equalTo(CombineFunction.MAX)); + } +} diff --git a/core/src/test/java/org/elasticsearch/index/query/CommonTermsQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/CommonTermsQueryBuilderTests.java new file mode 100644 index 00000000000..04f8437d0a2 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/query/CommonTermsQueryBuilderTests.java @@ -0,0 +1,156 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.query; + +import org.apache.lucene.queries.ExtendedCommonTermsQuery; +import org.apache.lucene.search.Query; +import org.junit.Test; + +import java.io.IOException; + +import static org.elasticsearch.index.query.QueryBuilders.commonTermsQuery; +import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.nullValue; + +public class CommonTermsQueryBuilderTests extends AbstractQueryTestCase { + + @Override + protected CommonTermsQueryBuilder doCreateTestQueryBuilder() { + CommonTermsQueryBuilder query; + + // mapped or unmapped field + String text = randomAsciiOfLengthBetween(1, 10); + if (randomBoolean()) { + query = new CommonTermsQueryBuilder(STRING_FIELD_NAME, text); + } else { + query = new CommonTermsQueryBuilder(randomAsciiOfLengthBetween(1, 10), text); + } + + if (randomBoolean()) { + query.cutoffFrequency((float) randomIntBetween(1, 10)); + } + + if (randomBoolean()) { + query.lowFreqOperator(randomFrom(Operator.values())); + } + + // number of low frequency terms that must match + if (randomBoolean()) { + query.lowFreqMinimumShouldMatch("" + randomIntBetween(1, 5)); + } + + if (randomBoolean()) { + query.highFreqOperator(randomFrom(Operator.values())); + } + + // number of high frequency terms that must match + if (randomBoolean()) { + query.highFreqMinimumShouldMatch("" + randomIntBetween(1, 5)); + } + + if (randomBoolean()) { + query.analyzer(randomAnalyzer()); + } + + if (randomBoolean()) { + query.disableCoord(randomBoolean()); + } + return query; + } + + @Override + protected void doAssertLuceneQuery(CommonTermsQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException { + assertThat(query, instanceOf(ExtendedCommonTermsQuery.class)); + 
ExtendedCommonTermsQuery extendedCommonTermsQuery = (ExtendedCommonTermsQuery) query; + assertThat(extendedCommonTermsQuery.getHighFreqMinimumNumberShouldMatchSpec(), equalTo(queryBuilder.highFreqMinimumShouldMatch())); + assertThat(extendedCommonTermsQuery.getLowFreqMinimumNumberShouldMatchSpec(), equalTo(queryBuilder.lowFreqMinimumShouldMatch())); + } + + @Test + public void testIllegalArguments() { + try { + if (randomBoolean()) { + new CommonTermsQueryBuilder(null, "text"); + } else { + new CommonTermsQueryBuilder("", "text"); + } + fail("must be non null"); + } catch (IllegalArgumentException e) { + // okay + } + + try { + new CommonTermsQueryBuilder("fieldName", null); + fail("must be non null"); + } catch (IllegalArgumentException e) { + // okay + } + } + + @Test + public void testNoTermsFromQueryString() throws IOException { + CommonTermsQueryBuilder builder = new CommonTermsQueryBuilder(STRING_FIELD_NAME, ""); + QueryShardContext context = createShardContext(); + context.setAllowUnmappedFields(true); + assertNull(builder.toQuery(context)); + } + + @Test + public void testCommonTermsQuery1() throws IOException { + String query = copyToStringFromClasspath("/org/elasticsearch/index/query/commonTerms-query1.json"); + Query parsedQuery = parseQuery(query).toQuery(createShardContext()); + assertThat(parsedQuery, instanceOf(ExtendedCommonTermsQuery.class)); + ExtendedCommonTermsQuery ectQuery = (ExtendedCommonTermsQuery) parsedQuery; + assertThat(ectQuery.getHighFreqMinimumNumberShouldMatchSpec(), nullValue()); + assertThat(ectQuery.getLowFreqMinimumNumberShouldMatchSpec(), equalTo("2")); + } + + @Test + public void testCommonTermsQuery2() throws IOException { + String query = copyToStringFromClasspath("/org/elasticsearch/index/query/commonTerms-query2.json"); + Query parsedQuery = parseQuery(query).toQuery(createShardContext()); + assertThat(parsedQuery, instanceOf(ExtendedCommonTermsQuery.class)); + ExtendedCommonTermsQuery ectQuery = (ExtendedCommonTermsQuery) 
parsedQuery; + assertThat(ectQuery.getHighFreqMinimumNumberShouldMatchSpec(), equalTo("50%")); + assertThat(ectQuery.getLowFreqMinimumNumberShouldMatchSpec(), equalTo("5<20%")); + } + + @Test + public void testCommonTermsQuery3() throws IOException { + String query = copyToStringFromClasspath("/org/elasticsearch/index/query/commonTerms-query3.json"); + Query parsedQuery = parseQuery(query).toQuery(createShardContext()); + assertThat(parsedQuery, instanceOf(ExtendedCommonTermsQuery.class)); + ExtendedCommonTermsQuery ectQuery = (ExtendedCommonTermsQuery) parsedQuery; + assertThat(ectQuery.getHighFreqMinimumNumberShouldMatchSpec(), nullValue()); + assertThat(ectQuery.getLowFreqMinimumNumberShouldMatchSpec(), equalTo("2")); + } + + @Test // see #11730 + public void testCommonTermsQuery4() throws IOException { + boolean disableCoord = randomBoolean(); + Query parsedQuery = parseQuery(commonTermsQuery("field", "text").disableCoord(disableCoord).buildAsBytes()).toQuery(createShardContext()); + assertThat(parsedQuery, instanceOf(ExtendedCommonTermsQuery.class)); + ExtendedCommonTermsQuery ectQuery = (ExtendedCommonTermsQuery) parsedQuery; + assertThat(ectQuery.isCoordDisabled(), equalTo(disableCoord)); + } + +} diff --git a/core/src/test/java/org/elasticsearch/index/query/ConstantScoreQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/ConstantScoreQueryBuilderTests.java new file mode 100644 index 00000000000..bb6d22eb144 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/query/ConstantScoreQueryBuilderTests.java @@ -0,0 +1,71 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.query; + +import org.apache.lucene.search.ConstantScoreQuery; +import org.apache.lucene.search.Query; +import org.elasticsearch.common.ParsingException; +import org.junit.Test; + +import java.io.IOException; + +import static org.hamcrest.CoreMatchers.*; + +public class ConstantScoreQueryBuilderTests extends AbstractQueryTestCase { + + /** + * @return a {@link ConstantScoreQueryBuilder} with random boost between 0.1f and 2.0f + */ + @Override + protected ConstantScoreQueryBuilder doCreateTestQueryBuilder() { + return new ConstantScoreQueryBuilder(RandomQueryBuilder.createQuery(random())); + } + + @Override + protected void doAssertLuceneQuery(ConstantScoreQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException { + Query innerQuery = queryBuilder.innerQuery().toQuery(context); + if (innerQuery == null) { + assertThat(query, nullValue()); + } else { + assertThat(query, instanceOf(ConstantScoreQuery.class)); + ConstantScoreQuery constantScoreQuery = (ConstantScoreQuery) query; + assertThat(constantScoreQuery.getQuery(), equalTo(innerQuery)); + } + } + + /** + * test that missing "filter" element causes {@link ParsingException} + */ + @Test(expected=ParsingException.class) + public void testFilterElement() throws IOException { + String queryString = "{ \"" + ConstantScoreQueryBuilder.NAME + "\" : {}"; + parseQuery(queryString); + } + + @Test + public void testIllegalArguments() { + try { + new ConstantScoreQueryBuilder(null); + fail("must not be null"); + } catch 
(IllegalArgumentException e) { + // expected + } + } +} diff --git a/core/src/test/java/org/elasticsearch/index/query/DisMaxQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/DisMaxQueryBuilderTests.java new file mode 100644 index 00000000000..0dac9c99c00 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/query/DisMaxQueryBuilderTests.java @@ -0,0 +1,146 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.query; + +import org.apache.lucene.index.Term; +import org.apache.lucene.search.DisjunctionMaxQuery; +import org.apache.lucene.search.PrefixQuery; +import org.apache.lucene.search.Query; +import org.junit.Test; + +import java.io.IOException; +import java.util.*; + +import static org.hamcrest.CoreMatchers.nullValue; +import static org.hamcrest.Matchers.*; + +public class DisMaxQueryBuilderTests extends AbstractQueryTestCase { + + /** + * @return a {@link DisMaxQueryBuilder} with random inner queries + */ + @Override + protected DisMaxQueryBuilder doCreateTestQueryBuilder() { + DisMaxQueryBuilder dismax = new DisMaxQueryBuilder(); + int clauses = randomIntBetween(1, 5); + for (int i = 0; i < clauses; i++) { + dismax.add(RandomQueryBuilder.createQuery(random())); + } + if (randomBoolean()) { + dismax.tieBreaker(2.0f / randomIntBetween(1, 20)); + } + return dismax; + } + + @Override + protected void doAssertLuceneQuery(DisMaxQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException { + Collection queries = AbstractQueryBuilder.toQueries(queryBuilder.innerQueries(), context); + if (queries.isEmpty()) { + assertThat(query, nullValue()); + } else { + assertThat(query, instanceOf(DisjunctionMaxQuery.class)); + DisjunctionMaxQuery disjunctionMaxQuery = (DisjunctionMaxQuery) query; + assertThat(disjunctionMaxQuery.getTieBreakerMultiplier(), equalTo(queryBuilder.tieBreaker())); + assertThat(disjunctionMaxQuery.getDisjuncts().size(), equalTo(queries.size())); + Iterator queryIterator = queries.iterator(); + for (int i = 0; i < disjunctionMaxQuery.getDisjuncts().size(); i++) { + assertThat(disjunctionMaxQuery.getDisjuncts().get(i), equalTo(queryIterator.next())); + } + } + } + + @Override + protected Map getAlternateVersions() { + Map alternateVersions = new HashMap<>(); + QueryBuilder innerQuery = createTestQueryBuilder().innerQueries().get(0); + DisMaxQueryBuilder expectedQuery = new DisMaxQueryBuilder(); + 
expectedQuery.add(innerQuery); + String contentString = "{\n" + + " \"dis_max\" : {\n" + + " \"queries\" : " + innerQuery.toString() + + " }\n" + + "}"; + alternateVersions.put(contentString, expectedQuery); + return alternateVersions; + } + + /** + * test `null` return value for missing inner queries + */ + @Test + public void testNoInnerQueries() throws IOException { + DisMaxQueryBuilder disMaxBuilder = new DisMaxQueryBuilder(); + assertNull(disMaxBuilder.toQuery(createShardContext())); + } + + /** + * Test inner query parsing to null. Current DSL allows inner filter element to parse to null. + * Those should be ignored upstream. To test this, we use inner {@link ConstantScoreQueryBuilder} + * with empty inner filter. + */ + @Test + public void testInnerQueryReturnsNull() throws IOException { + String queryString = "{ \"" + ConstantScoreQueryBuilder.NAME + "\" : { \"filter\" : { } } }"; + QueryBuilder innerQueryBuilder = parseQuery(queryString); + DisMaxQueryBuilder disMaxBuilder = new DisMaxQueryBuilder().add(innerQueryBuilder); + assertNull(disMaxBuilder.toQuery(createShardContext())); + } + + @Test + public void testIllegalArguments() { + DisMaxQueryBuilder disMaxQuery = new DisMaxQueryBuilder(); + try { + disMaxQuery.add(null); + fail("cannot be null"); + } catch (IllegalArgumentException e) { + // expected + } + } + + @Test + public void testToQueryInnerPrefixQuery() throws Exception { + assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); + String queryAsString = "{\n" + + " \"dis_max\":{\n" + + " \"queries\":[\n" + + " {\n" + + " \"prefix\":{\n" + + " \"" + STRING_FIELD_NAME + "\":{\n" + + " \"value\":\"sh\",\n" + + " \"boost\":1.2\n" + + " }\n" + + " }\n" + + " }\n" + + " ]\n" + + " }\n" + + "}"; + Query query = parseQuery(queryAsString).toQuery(createShardContext()); + assertThat(query, instanceOf(DisjunctionMaxQuery.class)); + DisjunctionMaxQuery disjunctionMaxQuery = (DisjunctionMaxQuery) query; + + List 
disjuncts = disjunctionMaxQuery.getDisjuncts(); + assertThat(disjuncts.size(), equalTo(1)); + + PrefixQuery firstQ = (PrefixQuery) disjuncts.get(0); + // the inner prefix query is built against the mapped string field + assertThat(firstQ.getPrefix(), equalTo(new Term(STRING_FIELD_NAME, "sh"))); + assertThat((double) firstQ.getBoost(), closeTo(1.2, 0.00001)); + } +} diff --git a/core/src/test/java/org/elasticsearch/index/query/ExistsQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/ExistsQueryBuilderTests.java new file mode 100644 index 00000000000..92523bb99f3 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/query/ExistsQueryBuilderTests.java @@ -0,0 +1,96 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.query; + +import org.apache.lucene.search.BooleanClause; +import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.ConstantScoreQuery; +import org.apache.lucene.search.Query; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.index.mapper.object.ObjectMapper; +import org.junit.Test; + +import java.io.IOException; +import java.util.Collection; + +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.instanceOf; + +public class ExistsQueryBuilderTests extends AbstractQueryTestCase { + + @Override + protected ExistsQueryBuilder doCreateTestQueryBuilder() { + String fieldPattern; + if (randomBoolean()) { + fieldPattern = randomFrom(MAPPED_FIELD_NAMES); + } else { + fieldPattern = randomAsciiOfLengthBetween(1, 10); + } + // also sometimes test wildcard patterns + if (randomBoolean()) { + if (randomBoolean()) { + fieldPattern = fieldPattern + "*"; + } else { + fieldPattern = MetaData.ALL; + } + } + return new ExistsQueryBuilder(fieldPattern); + } + + @Override + protected void doAssertLuceneQuery(ExistsQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException { + String fieldPattern = queryBuilder.fieldName(); + ObjectMapper objectMapper = context.getObjectMapper(fieldPattern); + if (objectMapper != null) { + // automatic make the object mapper pattern + fieldPattern = fieldPattern + ".*"; + } + Collection fields = context.simpleMatchToIndexNames(fieldPattern); + if (getCurrentTypes().length == 0 || fields.size() == 0) { + assertThat(query, instanceOf(BooleanQuery.class)); + BooleanQuery booleanQuery = (BooleanQuery) query; + assertThat(booleanQuery.clauses().size(), equalTo(0)); + } else { + assertThat(query, instanceOf(ConstantScoreQuery.class)); + ConstantScoreQuery constantScoreQuery = (ConstantScoreQuery) query; + assertThat(constantScoreQuery.getQuery(), instanceOf(BooleanQuery.class)); + BooleanQuery booleanQuery 
= (BooleanQuery) constantScoreQuery.getQuery(); + assertThat(booleanQuery.clauses().size(), equalTo(fields.size())); + for (int i = 0; i < fields.size(); i++) { + BooleanClause booleanClause = booleanQuery.clauses().get(i); + assertThat(booleanClause.getOccur(), equalTo(BooleanClause.Occur.SHOULD)); + } + } + } + + @Test + public void testIllegalArguments() { + try { + if (randomBoolean()) { + new ExistsQueryBuilder(null); + } else { + new ExistsQueryBuilder(""); + } + fail("must not be null or empty"); + } catch (IllegalArgumentException e) { + // expected + } + } +} diff --git a/core/src/test/java/org/elasticsearch/index/query/FieldMaskingSpanQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/FieldMaskingSpanQueryBuilderTests.java new file mode 100644 index 00000000000..64724d28b0f --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/query/FieldMaskingSpanQueryBuilderTests.java @@ -0,0 +1,80 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.query; + +import org.apache.lucene.search.Query; +import org.apache.lucene.search.spans.FieldMaskingSpanQuery; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.junit.Test; + +import java.io.IOException; + +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.instanceOf; + +public class FieldMaskingSpanQueryBuilderTests extends AbstractQueryTestCase { + + @Override + protected FieldMaskingSpanQueryBuilder doCreateTestQueryBuilder() { + String fieldName; + if (randomBoolean()) { + fieldName = randomFrom(MAPPED_FIELD_NAMES); + } else { + fieldName = randomAsciiOfLengthBetween(1, 10); + } + SpanTermQueryBuilder innerQuery = new SpanTermQueryBuilderTests().createTestQueryBuilder(); + return new FieldMaskingSpanQueryBuilder(innerQuery, fieldName); + } + + @Override + protected void doAssertLuceneQuery(FieldMaskingSpanQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException { + String fieldInQuery = queryBuilder.fieldName(); + MappedFieldType fieldType = context.fieldMapper(fieldInQuery); + if (fieldType != null) { + fieldInQuery = fieldType.names().indexName(); + } + assertThat(query, instanceOf(FieldMaskingSpanQuery.class)); + FieldMaskingSpanQuery fieldMaskingSpanQuery = (FieldMaskingSpanQuery) query; + assertThat(fieldMaskingSpanQuery.getField(), equalTo(fieldInQuery)); + assertThat(fieldMaskingSpanQuery.getMaskedQuery(), equalTo(queryBuilder.innerQuery().toQuery(context))); + } + + @Test + public void testIllegalArguments() { + try { + new FieldMaskingSpanQueryBuilder(null, "maskedField"); + fail("must be non null"); + } catch (IllegalArgumentException e) { + // okay + } + + try { + SpanQueryBuilder span = new SpanTermQueryBuilder("name", "value"); + if (randomBoolean()) { + new FieldMaskingSpanQueryBuilder(span, null); + } else { + new FieldMaskingSpanQueryBuilder(span, ""); + } + fail("must be non null"); + } catch (IllegalArgumentException 
e) { + // okay + } + } +} diff --git a/core/src/test/java/org/elasticsearch/index/query/FuzzyQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/FuzzyQueryBuilderTests.java new file mode 100644 index 00000000000..13fdb9563e6 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/query/FuzzyQueryBuilderTests.java @@ -0,0 +1,149 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.query; + +import org.apache.lucene.index.Term; +import org.apache.lucene.search.FuzzyQuery; +import org.apache.lucene.search.NumericRangeQuery; +import org.apache.lucene.search.Query; +import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.unit.Fuzziness; +import org.hamcrest.Matchers; +import org.junit.Test; + +import java.io.IOException; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; + +public class FuzzyQueryBuilderTests extends AbstractQueryTestCase { + + @Override + protected FuzzyQueryBuilder doCreateTestQueryBuilder() { + Tuple fieldAndValue = getRandomFieldNameAndValue(); + FuzzyQueryBuilder query = new FuzzyQueryBuilder(fieldAndValue.v1(), fieldAndValue.v2()); + if (randomBoolean()) { + query.fuzziness(randomFuzziness(query.fieldName())); + } + if (randomBoolean()) { + query.prefixLength(randomIntBetween(0, 10)); + } + if (randomBoolean()) { + query.maxExpansions(randomIntBetween(1, 10)); + } + if (randomBoolean()) { + query.transpositions(randomBoolean()); + } + if (randomBoolean()) { + query.rewrite(getRandomRewriteMethod()); + } + return query; + } + + @Override + protected void doAssertLuceneQuery(FuzzyQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException { + if (isNumericFieldName(queryBuilder.fieldName()) || queryBuilder.fieldName().equals(DATE_FIELD_NAME)) { + assertThat(query, instanceOf(NumericRangeQuery.class)); + } else { + assertThat(query, instanceOf(FuzzyQuery.class)); + } + } + + @Test + public void testIllegalArguments() { + try { + new FuzzyQueryBuilder(null, "text"); + fail("must not be null"); + } catch (IllegalArgumentException e) { + // expected + } + + try { + new FuzzyQueryBuilder("", "text"); + fail("must not be empty"); + } catch (IllegalArgumentException e) { + // expected + } + + try { + new FuzzyQueryBuilder("field", null); + fail("must not be null"); + } catch 
(IllegalArgumentException e) { + // expected + } + } + + @Test + public void testUnsupportedFuzzinessForStringType() throws IOException { + QueryShardContext context = createShardContext(); + context.setAllowUnmappedFields(true); + + FuzzyQueryBuilder fuzzyQueryBuilder = new FuzzyQueryBuilder(STRING_FIELD_NAME, "text"); + fuzzyQueryBuilder.fuzziness(Fuzziness.build(randomFrom("a string which is not auto", "3h", "200s"))); + + try { + fuzzyQueryBuilder.toQuery(context); + fail("should have failed with NumberFormatException"); + } catch (NumberFormatException e) { + assertThat(e.getMessage(), Matchers.containsString("For input string")); + } + } + + @Test + public void testToQueryWithStringField() throws IOException { + assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); + String query = "{\n" + + " \"fuzzy\":{\n" + + " \"" + STRING_FIELD_NAME + "\":{\n" + + " \"value\":\"sh\",\n" + + " \"fuzziness\": \"AUTO\",\n" + + " \"prefix_length\":1,\n" + + " \"boost\":2.0\n" + + " }\n" + + " }\n" + + "}"; + Query parsedQuery = parseQuery(query).toQuery(createShardContext()); + assertThat(parsedQuery, instanceOf(FuzzyQuery.class)); + FuzzyQuery fuzzyQuery = (FuzzyQuery) parsedQuery; + assertThat(fuzzyQuery.getTerm(), equalTo(new Term(STRING_FIELD_NAME, "sh"))); + assertThat(fuzzyQuery.getMaxEdits(), equalTo(Fuzziness.AUTO.asDistance("sh"))); + assertThat(fuzzyQuery.getPrefixLength(), equalTo(1)); + assertThat(fuzzyQuery.getBoost(), equalTo(2.0f)); + } + + @Test + public void testToQueryWithNumericField() throws IOException { + assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); + String query = "{\n" + + " \"fuzzy\":{\n" + + " \"" + INT_FIELD_NAME + "\":{\n" + + " \"value\":12,\n" + + " \"fuzziness\":5,\n" + + " \"boost\":2.0\n" + + " }\n" + + " }\n" + + "}\n"; + Query parsedQuery = parseQuery(query).toQuery(createShardContext()); + assertThat(parsedQuery, 
instanceOf(NumericRangeQuery.class)); + NumericRangeQuery fuzzyQuery = (NumericRangeQuery) parsedQuery; + assertThat(fuzzyQuery.getMin().longValue(), equalTo(7l)); + assertThat(fuzzyQuery.getMax().longValue(), equalTo(17l)); + } +} diff --git a/core/src/test/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilderTests.java new file mode 100644 index 00000000000..c138ad0e310 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilderTests.java @@ -0,0 +1,423 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.query; + +import com.spatial4j.core.io.GeohashUtils; +import com.spatial4j.core.shape.Rectangle; +import org.apache.lucene.search.*; +import org.elasticsearch.common.geo.GeoPoint; +import org.elasticsearch.common.geo.GeoUtils; +import org.elasticsearch.index.search.geo.InMemoryGeoBoundingBoxQuery; +import org.elasticsearch.test.geo.RandomShapeGenerator; +import org.junit.Test; + +import java.io.IOException; + +import static org.hamcrest.Matchers.closeTo; +import static org.hamcrest.Matchers.equalTo; + +public class GeoBoundingBoxQueryBuilderTests extends AbstractQueryTestCase { + /** Randomly generate either NaN or one of the two infinity values. */ + private static Double[] brokenDoubles = {Double.NaN, Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY}; + + @Override + protected GeoBoundingBoxQueryBuilder doCreateTestQueryBuilder() { + GeoBoundingBoxQueryBuilder builder = new GeoBoundingBoxQueryBuilder(GEO_POINT_FIELD_NAME); + Rectangle box = RandomShapeGenerator.xRandomRectangle(getRandom(), RandomShapeGenerator.xRandomPoint(getRandom())); + + if (randomBoolean()) { + // check the top-left/bottom-right combination of setters + int path = randomIntBetween(0, 2); + switch (path) { + case 0: + builder.setCorners( + new GeoPoint(box.getMaxY(), box.getMinX()), + new GeoPoint(box.getMinY(), box.getMaxX())); + break; + case 1: + builder.setCorners( + GeohashUtils.encodeLatLon(box.getMaxY(), box.getMinX()), + GeohashUtils.encodeLatLon(box.getMinY(), box.getMaxX())); + break; + default: + builder.setCorners(box.getMaxY(), box.getMinX(), box.getMinY(), box.getMaxX()); + } + } else { + // check the bottom-left/ top-right combination of setters + if (randomBoolean()) { + builder.setCornersOGC( + new GeoPoint(box.getMinY(), box.getMinX()), + new GeoPoint(box.getMaxY(), box.getMaxX())); + } else { + builder.setCornersOGC( + GeohashUtils.encodeLatLon(box.getMinY(), box.getMinX()), + GeohashUtils.encodeLatLon(box.getMaxY(), 
box.getMaxX())); + } + } + + if (randomBoolean()) { + builder.setValidationMethod(randomFrom(GeoValidationMethod.values())); + } + + builder.type(randomFrom(GeoExecType.values())); + return builder; + } + + @Test(expected = IllegalArgumentException.class) + public void testValidationNullFieldname() { + new GeoBoundingBoxQueryBuilder(null); + } + + + @Test(expected = IllegalArgumentException.class) + public void testValidationNullType() { + GeoBoundingBoxQueryBuilder qb = new GeoBoundingBoxQueryBuilder("teststring"); + qb.type((GeoExecType) null); + } + + @Test(expected = IllegalArgumentException.class) + public void testValidationNullTypeString() { + GeoBoundingBoxQueryBuilder qb = new GeoBoundingBoxQueryBuilder("teststring"); + qb.type((String) null); + } + + @Test + @Override + public void testToQuery() throws IOException { + assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); + super.testToQuery(); + } + + @Test(expected = QueryShardException.class) + public void testExceptionOnMissingTypes() throws IOException { + assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length == 0); + super.testToQuery(); + } + + @Test + public void testBrokenCoordinateCannotBeSet() { + PointTester[] testers = { new TopTester(), new LeftTester(), new BottomTester(), new RightTester() }; + + GeoBoundingBoxQueryBuilder builder = createTestQueryBuilder(); + builder.setValidationMethod(GeoValidationMethod.STRICT); + + for (PointTester tester : testers) { + try { + tester.invalidateCoordinate(builder, true); + fail("expected exception for broken " + tester.getClass().getName() + " coordinate"); + } catch (IllegalArgumentException e) { + // exptected + } + } + } + + @Test + public void testBrokenCoordinateCanBeSetWithIgnoreMalformed() { + PointTester[] testers = { new TopTester(), new LeftTester(), new BottomTester(), new RightTester() }; + + GeoBoundingBoxQueryBuilder builder = createTestQueryBuilder(); + 
builder.setValidationMethod(GeoValidationMethod.IGNORE_MALFORMED); + + for (PointTester tester : testers) { + tester.invalidateCoordinate(builder, true); + } + } + + + @Test + public void testValidation() { + PointTester[] testers = { new TopTester(), new LeftTester(), new BottomTester(), new RightTester() }; + + for (PointTester tester : testers) { + QueryValidationException except = null; + + GeoBoundingBoxQueryBuilder builder = createTestQueryBuilder(); + tester.invalidateCoordinate(builder.setValidationMethod(GeoValidationMethod.COERCE), false); + except = builder.checkLatLon(true); + assertNull("Inner post 2.0 validation w/ coerce should ignore invalid " + + tester.getClass().getName() + + " coordinate: " + + tester.invalidCoordinate + " ", + except); + + tester.invalidateCoordinate(builder.setValidationMethod(GeoValidationMethod.COERCE), false); + except = builder.checkLatLon(false); + assertNull("Inner pre 2.0 validation w/ coerce should ignore invalid coordinate: " + + tester.getClass().getName() + + " coordinate: " + + tester.invalidCoordinate + " ", + except); + + tester.invalidateCoordinate(builder.setValidationMethod(GeoValidationMethod.STRICT), false); + except = builder.checkLatLon(true); + assertNull("Inner pre 2.0 validation w/o coerce should ignore invalid coordinate for old indexes: " + + tester.getClass().getName() + + " coordinate: " + + tester.invalidCoordinate, + except); + + tester.invalidateCoordinate(builder.setValidationMethod(GeoValidationMethod.STRICT), false); + except = builder.checkLatLon(false); + assertNotNull("Inner post 2.0 validation w/o coerce should detect invalid coordinate: " + + tester.getClass().getName() + + " coordinate: " + + tester.invalidCoordinate, + except); + } + } + + @Test(expected = IllegalArgumentException.class) + public void testTopBottomCannotBeFlipped() { + GeoBoundingBoxQueryBuilder builder = createTestQueryBuilder(); + double top = builder.topLeft().getLat(); + double left = builder.topLeft().getLon(); + 
double bottom = builder.bottomRight().getLat(); + double right = builder.bottomRight().getLon(); + + assumeTrue("top should not be equal to bottom for flip check", top != bottom); + System.out.println("top: " + top + " bottom: " + bottom); + builder.setValidationMethod(GeoValidationMethod.STRICT).setCorners(bottom, left, top, right); + } + + @Test + public void testTopBottomCanBeFlippedOnIgnoreMalformed() { + GeoBoundingBoxQueryBuilder builder = createTestQueryBuilder(); + double top = builder.topLeft().getLat(); + double left = builder.topLeft().getLon(); + double bottom = builder.bottomRight().getLat(); + double right = builder.bottomRight().getLon(); + + assumeTrue("top should not be equal to bottom for flip check", top != bottom); + builder.setValidationMethod(GeoValidationMethod.IGNORE_MALFORMED).setCorners(bottom, left, top, right); + } + + @Test + public void testLeftRightCanBeFlipped() { + GeoBoundingBoxQueryBuilder builder = createTestQueryBuilder(); + double top = builder.topLeft().getLat(); + double left = builder.topLeft().getLon(); + double bottom = builder.bottomRight().getLat(); + double right = builder.bottomRight().getLon(); + + builder.setValidationMethod(GeoValidationMethod.IGNORE_MALFORMED).setCorners(top, right, bottom, left); + builder.setValidationMethod(GeoValidationMethod.STRICT).setCorners(top, right, bottom, left); + } + + @Test + public void testNormalization() throws IOException { + assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); + GeoBoundingBoxQueryBuilder qb = createTestQueryBuilder(); + if (getCurrentTypes().length != 0 && "mapped_geo".equals(qb.fieldName())) { + // only execute this test if we are running on a valid geo field + qb.setCorners(200, 200, qb.bottomRight().getLat(), qb.bottomRight().getLon()); + qb.setValidationMethod(GeoValidationMethod.COERCE); + Query query = qb.toQuery(createShardContext()); + if (query instanceof ConstantScoreQuery) { + ConstantScoreQuery result = 
(ConstantScoreQuery) query; + BooleanQuery bboxFilter = (BooleanQuery) result.getQuery(); + for (BooleanClause clause : bboxFilter.clauses()) { + NumericRangeQuery boundary = (NumericRangeQuery) clause.getQuery(); + if (boundary.getMax() != null) { + assertTrue("If defined, non of the maximum range values should be larger than 180", boundary.getMax().intValue() <= 180); + } + } + } else { + assertTrue("memory queries should result in InMemoryGeoBoundingBoxQuery", query instanceof InMemoryGeoBoundingBoxQuery); + } + } + } + + @Test + public void checkStrictnessDefault() { + assertFalse("Someone changed the default for coordinate validation - were the docs changed as well?", GeoValidationMethod.DEFAULT_LENIENT_PARSING); + } + + @Override + protected void doAssertLuceneQuery(GeoBoundingBoxQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException { + if (queryBuilder.type() == GeoExecType.INDEXED) { + assertTrue("Found no indexed geo query.", query instanceof ConstantScoreQuery); + } else { + assertTrue("Found no indexed geo query.", query instanceof InMemoryGeoBoundingBoxQuery); + } + } + + public abstract class PointTester { + private double brokenCoordinate = randomFrom(brokenDoubles); + private double invalidCoordinate; + + public PointTester(double invalidCoodinate) { + this.invalidCoordinate = invalidCoodinate; + } + public void invalidateCoordinate(GeoBoundingBoxQueryBuilder qb, boolean useBrokenDouble) { + if (useBrokenDouble) { + fillIn(brokenCoordinate, qb); + } else { + fillIn(invalidCoordinate, qb); + } + } + protected abstract void fillIn(double fillIn, GeoBoundingBoxQueryBuilder qb); + } + + public class TopTester extends PointTester { + public TopTester() { + super(randomDoubleBetween(GeoUtils.MAX_LAT, Double.MAX_VALUE, false)); + } + + @Override + public void fillIn(double coordinate, GeoBoundingBoxQueryBuilder qb) { + qb.setCorners(coordinate, qb.topLeft().getLon(), qb.bottomRight().getLat(), qb.bottomRight().getLon()); + } 
+ } + + public class LeftTester extends PointTester { + public LeftTester() { + super(randomDoubleBetween(-Double.MAX_VALUE, GeoUtils.MIN_LON, true)); + } + + @Override + public void fillIn(double coordinate, GeoBoundingBoxQueryBuilder qb) { + qb.setCorners(qb.topLeft().getLat(), coordinate, qb.bottomRight().getLat(), qb.bottomRight().getLon()); + } + } + + public class BottomTester extends PointTester { + public BottomTester() { + super(randomDoubleBetween(-Double.MAX_VALUE, GeoUtils.MIN_LAT, false)); + } + + @Override + public void fillIn(double coordinate, GeoBoundingBoxQueryBuilder qb) { + qb.setCorners(qb.topLeft().getLat(), qb.topLeft().getLon(), coordinate, qb.bottomRight().getLon()); + } + } + + public class RightTester extends PointTester { + public RightTester() { + super(randomDoubleBetween(GeoUtils.MAX_LON, Double.MAX_VALUE, true)); + } + + @Override + public void fillIn(double coordinate, GeoBoundingBoxQueryBuilder qb) { + qb.setCorners(qb.topLeft().getLat(), qb.topLeft().getLon(), qb.topLeft().getLat(), coordinate); + } + } + + @Test + public void testParsingAndToQuery1() throws IOException { + assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); + String query = "{\n" + + " \"geo_bounding_box\":{\n" + + " \"" + GEO_POINT_FIELD_NAME+ "\":{\n" + + " \"top_left\":[-70, 40],\n" + + " \"bottom_right\":[-80, 30]\n" + + " }\n" + + " }\n" + + "}\n"; + assertGeoBoundingBoxQuery(query); + } + + @Test + public void testParsingAndToQuery2() throws IOException { + assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); + String query = "{\n" + + " \"geo_bounding_box\":{\n" + + " \"" + GEO_POINT_FIELD_NAME+ "\":{\n" + + " \"top_left\":{\n" + + " \"lat\":40,\n" + + " \"lon\":-70\n" + + " },\n" + + " \"bottom_right\":{\n" + + " \"lat\":30,\n" + + " \"lon\":-80\n" + + " }\n" + + " }\n" + + " }\n" + + "}\n"; + assertGeoBoundingBoxQuery(query); + } + + @Test + public void 
testParsingAndToQuery3() throws IOException { + assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); + String query = "{\n" + + " \"geo_bounding_box\":{\n" + + " \"" + GEO_POINT_FIELD_NAME+ "\":{\n" + + " \"top_left\":\"40, -70\",\n" + + " \"bottom_right\":\"30, -80\"\n" + + " }\n" + + " }\n" + + "}\n"; + assertGeoBoundingBoxQuery(query); + } + + @Test + public void testParsingAndToQuery4() throws IOException { + assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); + String query = "{\n" + + " \"geo_bounding_box\":{\n" + + " \"" + GEO_POINT_FIELD_NAME+ "\":{\n" + + " \"top_left\":\"drn5x1g8cu2y\",\n" + + " \"bottom_right\":\"30, -80\"\n" + + " }\n" + + " }\n" + + "}\n"; + assertGeoBoundingBoxQuery(query); + } + + @Test + public void testParsingAndToQuery5() throws IOException { + assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); + String query = "{\n" + + " \"geo_bounding_box\":{\n" + + " \"" + GEO_POINT_FIELD_NAME+ "\":{\n" + + " \"top_right\":\"40, -80\",\n" + + " \"bottom_left\":\"30, -70\"\n" + + " }\n" + + " }\n" + + "}\n"; + assertGeoBoundingBoxQuery(query); + } + + @Test + public void testParsingAndToQuery6() throws IOException { + assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); + String query = "{\n" + + " \"geo_bounding_box\":{\n" + + " \"" + GEO_POINT_FIELD_NAME+ "\":{\n" + + " \"right\": -80,\n" + + " \"top\": 40,\n" + + " \"left\": -70,\n" + + " \"bottom\": 30\n" + + " }\n" + + " }\n" + + "}\n"; + assertGeoBoundingBoxQuery(query); + } + + private void assertGeoBoundingBoxQuery(String query) throws IOException { + Query parsedQuery = parseQuery(query).toQuery(createShardContext()); + InMemoryGeoBoundingBoxQuery filter = (InMemoryGeoBoundingBoxQuery) parsedQuery; + assertThat(filter.fieldName(), equalTo(GEO_POINT_FIELD_NAME)); + assertThat(filter.topLeft().lat(), closeTo(40, 
0.00001)); + assertThat(filter.topLeft().lon(), closeTo(-70, 0.00001)); + assertThat(filter.bottomRight().lat(), closeTo(30, 0.00001)); + assertThat(filter.bottomRight().lon(), closeTo(-80, 0.00001)); + } +} diff --git a/core/src/test/java/org/elasticsearch/index/query/GeoDistanceQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/GeoDistanceQueryBuilderTests.java new file mode 100644 index 00000000000..f65b523b1bf --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/query/GeoDistanceQueryBuilderTests.java @@ -0,0 +1,374 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.query; + +import com.spatial4j.core.shape.Point; +import org.apache.lucene.search.Query; +import org.elasticsearch.common.geo.GeoDistance; +import org.elasticsearch.common.geo.GeoPoint; +import org.elasticsearch.common.unit.DistanceUnit; +import org.elasticsearch.index.search.geo.GeoDistanceRangeQuery; +import org.elasticsearch.test.geo.RandomShapeGenerator; +import org.junit.Test; + +import java.io.IOException; + +import static org.hamcrest.Matchers.*; + +public class GeoDistanceQueryBuilderTests extends AbstractQueryTestCase { + + @Override + protected GeoDistanceQueryBuilder doCreateTestQueryBuilder() { + GeoDistanceQueryBuilder qb = new GeoDistanceQueryBuilder(GEO_POINT_FIELD_NAME); + String distance = "" + randomDouble(); + if (randomBoolean()) { + DistanceUnit unit = randomFrom(DistanceUnit.values()); + distance = distance + unit.toString(); + } + int selector = randomIntBetween(0, 2); + switch (selector) { + case 0: + qb.distance(randomDouble(), randomFrom(DistanceUnit.values())); + break; + case 1: + qb.distance(distance, randomFrom(DistanceUnit.values())); + break; + case 2: + qb.distance(distance); + break; + } + + Point p = RandomShapeGenerator.xRandomPoint(random()); + qb.point(new GeoPoint(p.getY(), p.getX())); + + if (randomBoolean()) { + qb.setValidationMethod(randomFrom(GeoValidationMethod.values())); + } + + if (randomBoolean()) { + qb.optimizeBbox(randomFrom("none", "memory", "indexed")); + } + + if (randomBoolean()) { + qb.geoDistance(randomFrom(GeoDistance.values())); + } + return qb; + } + + public void testIllegalValues() { + try { + if (randomBoolean()) { + new GeoDistanceQueryBuilder(""); + } else { + new GeoDistanceQueryBuilder(null); + } + fail("must not be null or empty"); + } catch (IllegalArgumentException ex) { + // expected + } + + GeoDistanceQueryBuilder query = new GeoDistanceQueryBuilder("fieldName"); + try { + if (randomBoolean()) { + query.distance(""); + } else { + query.distance(null); + 
} + fail("must not be null or empty"); + } catch (IllegalArgumentException ex) { + // expected + } + + try { + if (randomBoolean()) { + query.distance("", DistanceUnit.DEFAULT); + } else { + query.distance(null, DistanceUnit.DEFAULT); + } + fail("must not be null or empty"); + } catch (IllegalArgumentException ex) { + // expected + } + + try { + query.distance("1", null); + fail("unit must not be null"); + } catch (IllegalArgumentException ex) { + // expected + } + + try { + query.distance(1, null); + fail("unit must not be null"); + } catch (IllegalArgumentException ex) { + // expected + } + + try { + query.geohash(null); + fail("geohash must not be null"); + } catch (IllegalArgumentException ex) { + // expected + } + + try { + query.geoDistance(null); + fail("geodistance must not be null"); + } catch (IllegalArgumentException ex) { + // expected + } + + try { + query.optimizeBbox(null); + fail("optimizeBbox must not be null"); + } catch (IllegalArgumentException ex) { + // expected + } + } + + /** + * Overridden here to ensure the test is only run if at least one type is + * present in the mappings. 
Geo queries do not execute if the field is not + * explicitly mapped + */ + @Override + @Test + public void testToQuery() throws IOException { + assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); + super.testToQuery(); + } + + @Override + protected void doAssertLuceneQuery(GeoDistanceQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException { + assertThat(query, instanceOf(GeoDistanceRangeQuery.class)); + GeoDistanceRangeQuery geoQuery = (GeoDistanceRangeQuery) query; + assertThat(geoQuery.fieldName(), equalTo(queryBuilder.fieldName())); + if (queryBuilder.point() != null) { + assertThat(geoQuery.lat(), equalTo(queryBuilder.point().lat())); + assertThat(geoQuery.lon(), equalTo(queryBuilder.point().lon())); + } + assertThat(geoQuery.geoDistance(), equalTo(queryBuilder.geoDistance())); + assertThat(geoQuery.minInclusiveDistance(), equalTo(Double.NEGATIVE_INFINITY)); + double distance = queryBuilder.distance(); + if (queryBuilder.geoDistance() != null) { + distance = queryBuilder.geoDistance().normalize(distance, DistanceUnit.DEFAULT); + } + assertThat(geoQuery.maxInclusiveDistance(), closeTo(distance, Math.abs(distance) / 1000)); + } + + @Test + public void testParsingAndToQuery1() throws IOException { + assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); + String query = "{\n" + + " \"geo_distance\":{\n" + + " \"distance\":\"12mi\",\n" + + " \"" + GEO_POINT_FIELD_NAME + "\":{\n" + + " \"lat\":40,\n" + + " \"lon\":-70\n" + + " }\n" + + " }\n" + + "}\n"; + assertGeoDistanceRangeQuery(query); + } + + @Test + public void testParsingAndToQuery2() throws IOException { + assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); + String query = "{\n" + + " \"geo_distance\":{\n" + + " \"distance\":\"12mi\",\n" + + " \"" + GEO_POINT_FIELD_NAME + "\":[-70, 40]\n" + + " }\n" + + "}\n"; + assertGeoDistanceRangeQuery(query); + } 
+ + @Test + public void testParsingAndToQuery3() throws IOException { + assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); + String query = "{\n" + + " \"geo_distance\":{\n" + + " \"distance\":\"12mi\",\n" + + " \"" + GEO_POINT_FIELD_NAME + "\":\"40, -70\"\n" + + " }\n" + + "}\n"; + assertGeoDistanceRangeQuery(query); + } + + @Test + public void testParsingAndToQuery4() throws IOException { + assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); + String query = "{\n" + + " \"geo_distance\":{\n" + + " \"distance\":\"12mi\",\n" + + " \"" + GEO_POINT_FIELD_NAME + "\":\"drn5x1g8cu2y\"\n" + + " }\n" + + "}\n"; + assertGeoDistanceRangeQuery(query); + } + + @Test + public void testParsingAndToQuery5() throws IOException { + assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); + String query = "{\n" + + " \"geo_distance\":{\n" + + " \"distance\":12,\n" + + " \"unit\":\"mi\",\n" + + " \"" + GEO_POINT_FIELD_NAME + "\":{\n" + + " \"lat\":40,\n" + + " \"lon\":-70\n" + + " }\n" + + " }\n" + + "}\n"; + assertGeoDistanceRangeQuery(query); + } + + @Test + public void testParsingAndToQuery6() throws IOException { + assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); + String query = "{\n" + + " \"geo_distance\":{\n" + + " \"distance\":\"12\",\n" + + " \"unit\":\"mi\",\n" + + " \"" + GEO_POINT_FIELD_NAME + "\":{\n" + + " \"lat\":40,\n" + + " \"lon\":-70\n" + + " }\n" + + " }\n" + + "}\n"; + assertGeoDistanceRangeQuery(query); + } + + @Test + public void testParsingAndToQuery7() throws IOException { + assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); + String query = "{\n" + + " \"geo_distance\":{\n" + + " \"distance\":\"19.312128\",\n" + + " \"" + GEO_POINT_FIELD_NAME + "\":{\n" + + " \"lat\":40,\n" + + " \"lon\":-70\n" + + " }\n" + + " }\n" + + "}\n"; + Query 
parsedQuery = parseQuery(query).toQuery(createShardContext()); + GeoDistanceRangeQuery filter = (GeoDistanceRangeQuery) parsedQuery; + assertThat(filter.fieldName(), equalTo(GEO_POINT_FIELD_NAME)); + assertThat(filter.lat(), closeTo(40, 0.00001)); + assertThat(filter.lon(), closeTo(-70, 0.00001)); + assertThat(filter.minInclusiveDistance(), equalTo(Double.NEGATIVE_INFINITY)); + assertThat(filter.maxInclusiveDistance(), closeTo(DistanceUnit.DEFAULT.convert(0.012, DistanceUnit.MILES), 0.00001)); + } + + @Test + public void testParsingAndToQuery8() throws IOException { + assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); + String query = "{\n" + + " \"geo_distance\":{\n" + + " \"distance\":19.312128,\n" + + " \"" + GEO_POINT_FIELD_NAME + "\":{\n" + + " \"lat\":40,\n" + + " \"lon\":-70\n" + + " }\n" + + " }\n" + + "}\n"; + Query parsedQuery = parseQuery(query).toQuery(createShardContext()); + GeoDistanceRangeQuery filter = (GeoDistanceRangeQuery) parsedQuery; + assertThat(filter.fieldName(), equalTo(GEO_POINT_FIELD_NAME)); + assertThat(filter.lat(), closeTo(40, 0.00001)); + assertThat(filter.lon(), closeTo(-70, 0.00001)); + assertThat(filter.minInclusiveDistance(), equalTo(Double.NEGATIVE_INFINITY)); + assertThat(filter.maxInclusiveDistance(), closeTo(DistanceUnit.KILOMETERS.convert(12, DistanceUnit.MILES), 0.00001)); + } + + @Test + public void testParsingAndToQuery9() throws IOException { + assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); + String query = "{\n" + + " \"geo_distance\":{\n" + + " \"distance\":\"19.312128\",\n" + + " \"unit\":\"km\",\n" + + " \"" + GEO_POINT_FIELD_NAME + "\":{\n" + + " \"lat\":40,\n" + + " \"lon\":-70\n" + + " }\n" + + " }\n" + + "}\n"; + assertGeoDistanceRangeQuery(query); + } + + @Test + public void testParsingAndToQuery10() throws IOException { + assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); + 
String query = "{\n" + + " \"geo_distance\":{\n" + + " \"distance\":19.312128,\n" + + " \"unit\":\"km\",\n" + + " \"" + GEO_POINT_FIELD_NAME + "\":{\n" + + " \"lat\":40,\n" + + " \"lon\":-70\n" + + " }\n" + + " }\n" + + "}\n"; + assertGeoDistanceRangeQuery(query); + } + + @Test + public void testParsingAndToQuery11() throws IOException { + assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); + String query = "{\n" + + " \"geo_distance\":{\n" + + " \"distance\":\"19.312128km\",\n" + + " \"" + GEO_POINT_FIELD_NAME + "\":{\n" + + " \"lat\":40,\n" + + " \"lon\":-70\n" + + " }\n" + + " }\n" + + "}\n"; + assertGeoDistanceRangeQuery(query); + } + + @Test + public void testParsingAndToQuery12() throws IOException { + assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); + String query = "{\n" + + " \"geo_distance\":{\n" + + " \"distance\":\"12mi\",\n" + + " \"unit\":\"km\",\n" + + " \"" + GEO_POINT_FIELD_NAME + "\":{\n" + + " \"lat\":40,\n" + + " \"lon\":-70\n" + + " }\n" + + " }\n" + + "}\n"; + assertGeoDistanceRangeQuery(query); + } + + private void assertGeoDistanceRangeQuery(String query) throws IOException { + assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); + Query parsedQuery = parseQuery(query).toQuery(createShardContext()); + GeoDistanceRangeQuery filter = (GeoDistanceRangeQuery) parsedQuery; + assertThat(filter.fieldName(), equalTo(GEO_POINT_FIELD_NAME)); + assertThat(filter.lat(), closeTo(40, 0.00001)); + assertThat(filter.lon(), closeTo(-70, 0.00001)); + assertThat(filter.minInclusiveDistance(), equalTo(Double.NEGATIVE_INFINITY)); + assertThat(filter.maxInclusiveDistance(), closeTo(DistanceUnit.DEFAULT.convert(12, DistanceUnit.MILES), 0.00001)); + } +} diff --git a/core/src/test/java/org/elasticsearch/index/query/GeoDistanceRangeQueryTests.java b/core/src/test/java/org/elasticsearch/index/query/GeoDistanceRangeQueryTests.java new file 
mode 100644 index 00000000000..9dd1a55f98c --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/query/GeoDistanceRangeQueryTests.java @@ -0,0 +1,208 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.query; + +import org.apache.lucene.search.Query; +import org.elasticsearch.common.geo.GeoDistance; +import org.elasticsearch.common.geo.GeoPoint; +import org.elasticsearch.common.unit.DistanceUnit; +import org.elasticsearch.index.search.geo.GeoDistanceRangeQuery; +import org.junit.Test; + +import java.io.IOException; + +import static org.hamcrest.Matchers.closeTo; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; + +public class GeoDistanceRangeQueryTests extends AbstractQueryTestCase { + + @Override + protected GeoDistanceRangeQueryBuilder doCreateTestQueryBuilder() { + GeoDistanceRangeQueryBuilder builder; + if (randomBoolean()) { + builder = new GeoDistanceRangeQueryBuilder(GEO_POINT_FIELD_NAME, randomGeohash(1, 12)); + } else { + double lat = randomDouble() * 180 - 90; + double lon = randomDouble() * 360 - 180; + if (randomBoolean()) { + builder = new GeoDistanceRangeQueryBuilder(GEO_POINT_FIELD_NAME, new GeoPoint(lat, lon)); + } else { 
+ builder = new GeoDistanceRangeQueryBuilder(GEO_POINT_FIELD_NAME, lat, lon); + } + } + int fromValue = randomInt(1000000); + int toValue = randomIntBetween(fromValue, 1000000); + String fromToUnits = randomFrom(DistanceUnit.values()).toString(); + if (randomBoolean()) { + int branch = randomInt(2); + switch (branch) { + case 0: + builder.from(fromValue); + break; + case 1: + builder.to(toValue); + break; + case 2: + builder.from(fromValue); + builder.to(toValue); + break; + } + } else { + int branch = randomInt(2); + switch (branch) { + case 0: + builder.from(fromValue + fromToUnits); + break; + case 1: + builder.to(toValue + fromToUnits); + break; + case 2: + builder.from(fromValue + fromToUnits); + builder.to(toValue + fromToUnits); + break; + } + } + if (randomBoolean()) { + builder.includeLower(randomBoolean()); + } + if (randomBoolean()) { + builder.includeUpper(randomBoolean()); + } + if (randomBoolean()) { + builder.geoDistance(randomFrom(GeoDistance.values())); + } + if (randomBoolean()) { + builder.unit(randomFrom(DistanceUnit.values())); + } + if (randomBoolean()) { + builder.optimizeBbox(randomFrom("none", "memory", "indexed")); + } + if (randomBoolean()) { + builder.setValidationMethod(randomFrom(GeoValidationMethod.values())); + } + return builder; + } + + @Override + protected void doAssertLuceneQuery(GeoDistanceRangeQueryBuilder queryBuilder, Query query, QueryShardContext context) + throws IOException { + assertThat(query, instanceOf(GeoDistanceRangeQuery.class)); + GeoDistanceRangeQuery geoQuery = (GeoDistanceRangeQuery) query; + assertThat(geoQuery.fieldName(), equalTo(queryBuilder.fieldName())); + if (queryBuilder.point() != null) { + assertThat(geoQuery.lat(), equalTo(queryBuilder.point().lat())); + assertThat(geoQuery.lon(), equalTo(queryBuilder.point().lon())); + } + assertThat(geoQuery.geoDistance(), equalTo(queryBuilder.geoDistance())); + if (queryBuilder.from() != null && queryBuilder.from() instanceof Number) { + double fromValue = 
((Number) queryBuilder.from()).doubleValue(); + if (queryBuilder.unit() != null) { + fromValue = queryBuilder.unit().toMeters(fromValue); + } + if (queryBuilder.geoDistance() != null) { + fromValue = queryBuilder.geoDistance().normalize(fromValue, DistanceUnit.DEFAULT); + } + assertThat(geoQuery.minInclusiveDistance(), closeTo(fromValue, Math.abs(fromValue) / 1000)); + } + if (queryBuilder.to() != null && queryBuilder.to() instanceof Number) { + double toValue = ((Number) queryBuilder.to()).doubleValue(); + if (queryBuilder.unit() != null) { + toValue = queryBuilder.unit().toMeters(toValue); + } + if (queryBuilder.geoDistance() != null) { + toValue = queryBuilder.geoDistance().normalize(toValue, DistanceUnit.DEFAULT); + } + assertThat(geoQuery.maxInclusiveDistance(), closeTo(toValue, Math.abs(toValue) / 1000)); + } + } + + /** + * Overridden here to ensure the test is only run if at least one type is + * present in the mappings. Geo queries do not execute if the field is not + * explicitly mapped + */ + @Override + @Test + public void testToQuery() throws IOException { + assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); + super.testToQuery(); + } + + @Test(expected=IllegalArgumentException.class) + public void testNullFieldName() { + if (randomBoolean()) { + new GeoDistanceRangeQueryBuilder(null, new GeoPoint()); + } else { + new GeoDistanceRangeQueryBuilder("", new GeoPoint()); + } + } + + @Test(expected=IllegalArgumentException.class) + public void testNoPoint() { + if (randomBoolean()) { + new GeoDistanceRangeQueryBuilder(GEO_POINT_FIELD_NAME, (GeoPoint) null); + } else { + new GeoDistanceRangeQueryBuilder(GEO_POINT_FIELD_NAME, (String) null); + } + } + + @Test(expected=IllegalArgumentException.class) + public void testInvalidFrom() { + GeoDistanceRangeQueryBuilder builder = new GeoDistanceRangeQueryBuilder(GEO_POINT_FIELD_NAME, new GeoPoint()); + if (randomBoolean()) { + builder.from((String) null); + } else { + 
builder.from((Number) null); + } + } + + @Test(expected=IllegalArgumentException.class) + public void testInvalidTo() { + GeoDistanceRangeQueryBuilder builder = new GeoDistanceRangeQueryBuilder(GEO_POINT_FIELD_NAME, new GeoPoint()); + if (randomBoolean()) { + builder.to((String) null); + } else { + builder.to((Number) null); + } + } + + @Test(expected=IllegalArgumentException.class) + public void testInvalidOptimizeBBox() { + GeoDistanceRangeQueryBuilder builder = new GeoDistanceRangeQueryBuilder(GEO_POINT_FIELD_NAME, new GeoPoint()); + if (randomBoolean()) { + builder.optimizeBbox(null); + } else { + builder.optimizeBbox("foo"); + } + } + + @Test(expected=IllegalArgumentException.class) + public void testInvalidGeoDistance() { + GeoDistanceRangeQueryBuilder builder = new GeoDistanceRangeQueryBuilder(GEO_POINT_FIELD_NAME, new GeoPoint()); + builder.geoDistance(null); + } + + @Test(expected=IllegalArgumentException.class) + public void testInvalidDistanceUnit() { + GeoDistanceRangeQueryBuilder builder = new GeoDistanceRangeQueryBuilder(GEO_POINT_FIELD_NAME, new GeoPoint()); + builder.unit(null); + } +} diff --git a/core/src/test/java/org/elasticsearch/index/query/GeoPolygonQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/GeoPolygonQueryBuilderTests.java new file mode 100644 index 00000000000..1825398f1b6 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/query/GeoPolygonQueryBuilderTests.java @@ -0,0 +1,271 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.query; + +import com.spatial4j.core.shape.jts.JtsGeometry; +import com.vividsolutions.jts.geom.Coordinate; + +import org.apache.lucene.search.Query; +import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.geo.GeoPoint; +import org.elasticsearch.common.geo.GeoUtils; +import org.elasticsearch.common.geo.builders.ShapeBuilder; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.index.search.geo.GeoPolygonQuery; +import org.elasticsearch.test.geo.RandomShapeGenerator; +import org.elasticsearch.test.geo.RandomShapeGenerator.ShapeType; +import org.junit.Test; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath; +import static org.hamcrest.Matchers.closeTo; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; + +public class GeoPolygonQueryBuilderTests extends AbstractQueryTestCase { + + @Override + protected GeoPolygonQueryBuilder doCreateTestQueryBuilder() { + List polygon = randomPolygon(randomIntBetween(4, 50)); + GeoPolygonQueryBuilder builder = new GeoPolygonQueryBuilder(GEO_POINT_FIELD_NAME, polygon); + if (randomBoolean()) { + builder.setValidationMethod(randomFrom(GeoValidationMethod.values())); + } + return builder; + } + + @Override + protected void doAssertLuceneQuery(GeoPolygonQueryBuilder queryBuilder, Query query, 
QueryShardContext context) throws IOException { + assertThat(query, instanceOf(GeoPolygonQuery.class)); + GeoPolygonQuery geoQuery = (GeoPolygonQuery) query; + assertThat(geoQuery.fieldName(), equalTo(queryBuilder.fieldName())); + List queryBuilderPoints = queryBuilder.points(); + GeoPoint[] queryPoints = geoQuery.points(); + assertThat(queryPoints.length, equalTo(queryBuilderPoints.size())); + if (GeoValidationMethod.isCoerce(queryBuilder.getValidationMethod())) { + for (int i = 0; i < queryBuilderPoints.size(); i++) { + GeoPoint queryBuilderPoint = queryBuilderPoints.get(i); + GeoUtils.normalizePoint(queryBuilderPoint, true, true); + assertThat(queryPoints[i], equalTo(queryBuilderPoint)); + } + } else { + for (int i = 0; i < queryBuilderPoints.size(); i++) { + assertThat(queryPoints[i], equalTo(queryBuilderPoints.get(i))); + } + } + } + + /** + * Overridden here to ensure the test is only run if at least one type is + * present in the mappings. Geo queries do not execute if the field is not + * explicitly mapped + */ + @Override + public void testToQuery() throws IOException { + assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); + super.testToQuery(); + } + + public List randomPolygon(int numPoints) { + ShapeBuilder shapeBuilder = null; + // This is a temporary fix because sometimes the RandomShapeGenerator + // returns null. This is if there is an error generating the polygon. 
So + // in this case keep trying until we successfully generate one + while (shapeBuilder == null) { + shapeBuilder = RandomShapeGenerator.createShapeWithin(getRandom(), null, ShapeType.POLYGON); + } + JtsGeometry shape = (JtsGeometry) shapeBuilder.build(); + Coordinate[] coordinates = shape.getGeom().getCoordinates(); + ArrayList polygonPoints = new ArrayList<>(); + for (Coordinate coord : coordinates) { + polygonPoints.add(new GeoPoint(coord.y, coord.x)); + } + return polygonPoints; + } + + @Test(expected = IllegalArgumentException.class) + public void testNullFieldName() { + new GeoPolygonQueryBuilder(null, randomPolygon(5)); + } + + @Test(expected = IllegalArgumentException.class) + public void testEmptyPolygon() { + if (randomBoolean()) { + new GeoPolygonQueryBuilder(GEO_POINT_FIELD_NAME, new ArrayList()); + } else { + new GeoPolygonQueryBuilder(GEO_POINT_FIELD_NAME, null); + } + } + + @Test(expected=IllegalArgumentException.class) + public void testInvalidClosedPolygon() { + List points = new ArrayList<>(); + points.add(new GeoPoint(0, 90)); + points.add(new GeoPoint(90, 90)); + points.add(new GeoPoint(0, 90)); + new GeoPolygonQueryBuilder(GEO_POINT_FIELD_NAME, points); + + } + + @Test(expected=IllegalArgumentException.class) + public void testInvalidOpenPolygon() { + List points = new ArrayList<>(); + points.add(new GeoPoint(0, 90)); + points.add(new GeoPoint(90, 90)); + new GeoPolygonQueryBuilder(GEO_POINT_FIELD_NAME, points); + } + + public void testDeprecatedXContent() throws IOException { + XContentBuilder builder = XContentFactory.jsonBuilder().prettyPrint(); + builder.startObject(); + builder.startObject("geo_polygon"); + builder.startObject(GEO_POINT_FIELD_NAME); + builder.startArray("points"); + builder.value("0,0"); + builder.value("0,90"); + builder.value("90,90"); + builder.value("90,0"); + builder.endArray(); + builder.endObject(); + builder.field("normalize", true); // deprecated + builder.endObject(); + builder.endObject(); + try { + 
parseQuery(builder.string()); + fail("normalize is deprecated"); + } catch (IllegalArgumentException ex) { + assertEquals("Deprecated field [normalize] used, expected [coerce] instead", ex.getMessage()); + } + } + + @Test + public void testParsingAndToQueryParsingExceptions() throws IOException { + String[] brokenFiles = new String[]{ + "/org/elasticsearch/index/query/geo_polygon_exception_1.json", + "/org/elasticsearch/index/query/geo_polygon_exception_2.json", + "/org/elasticsearch/index/query/geo_polygon_exception_3.json", + "/org/elasticsearch/index/query/geo_polygon_exception_4.json", + "/org/elasticsearch/index/query/geo_polygon_exception_5.json" + }; + for (String brokenFile : brokenFiles) { + String query = copyToStringFromClasspath(brokenFile); + try { + parseQuery(query); + fail("parsing a broken geo_polygon filter didn't fail as expected while parsing: " + brokenFile); + } catch (ParsingException e) { + // success! + } + } + } + + @Test + public void testParsingAndToQuery1() throws IOException { + assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); + String query = "{\n" + + " \"geo_polygon\":{\n" + + " \"" + GEO_POINT_FIELD_NAME + "\":{\n" + + " \"points\":[\n" + + " [-70, 40],\n" + + " [-80, 30],\n" + + " [-90, 20]\n" + + " ]\n" + + " }\n" + + " }\n" + + "}\n"; + assertGeoPolygonQuery(query); + } + + @Test + public void testParsingAndToQuery2() throws IOException { + assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); + String query = "{\n" + + " \"geo_polygon\":{\n" + + " \"" + GEO_POINT_FIELD_NAME + "\":{\n" + + " \"points\":[\n" + + " {\n" + + " \"lat\":40,\n" + + " \"lon\":-70\n" + + " },\n" + + " {\n" + + " \"lat\":30,\n" + + " \"lon\":-80\n" + + " },\n" + + " {\n" + + " \"lat\":20,\n" + + " \"lon\":-90\n" + + " }\n" + + " ]\n" + + " }\n" + + " }\n" + + "}\n"; + assertGeoPolygonQuery(query); + } + + @Test + public void testParsingAndToQuery3() throws IOException 
{ + assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); + String query = "{\n" + + " \"geo_polygon\":{\n" + + " \"" + GEO_POINT_FIELD_NAME + "\":{\n" + + " \"points\":[\n" + + " \"40, -70\",\n" + + " \"30, -80\",\n" + + " \"20, -90\"\n" + + " ]\n" + + " }\n" + + " }\n" + + "}\n"; + assertGeoPolygonQuery(query); + } + + @Test + public void testParsingAndToQuery4() throws IOException { + assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); + String query = "{\n" + + " \"geo_polygon\":{\n" + + " \"" + GEO_POINT_FIELD_NAME + "\":{\n" + + " \"points\":[\n" + + " \"drn5x1g8cu2y\",\n" + + " \"30, -80\",\n" + + " \"20, -90\"\n" + + " ]\n" + + " }\n" + + " }\n" + + "}\n"; + assertGeoPolygonQuery(query); + } + + private void assertGeoPolygonQuery(String query) throws IOException { + Query parsedQuery = parseQuery(query).toQuery(createShardContext()); + GeoPolygonQuery filter = (GeoPolygonQuery) parsedQuery; + assertThat(filter.fieldName(), equalTo(GEO_POINT_FIELD_NAME)); + assertThat(filter.points().length, equalTo(4)); + assertThat(filter.points()[0].lat(), closeTo(40, 0.00001)); + assertThat(filter.points()[0].lon(), closeTo(-70, 0.00001)); + assertThat(filter.points()[1].lat(), closeTo(30, 0.00001)); + assertThat(filter.points()[1].lon(), closeTo(-80, 0.00001)); + assertThat(filter.points()[2].lat(), closeTo(20, 0.00001)); + assertThat(filter.points()[2].lon(), closeTo(-90, 0.00001)); + } +} diff --git a/core/src/test/java/org/elasticsearch/index/query/GeoShapeQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/GeoShapeQueryBuilderTests.java index 7a4d1d9a53d..a4ac66c658c 100644 --- a/core/src/test/java/org/elasticsearch/index/query/GeoShapeQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/GeoShapeQueryBuilderTests.java @@ -19,13 +19,180 @@ package org.elasticsearch.index.query; +import org.apache.lucene.search.BooleanQuery; +import 
org.apache.lucene.search.ConstantScoreQuery; +import org.apache.lucene.search.Query; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.get.GetRequest; +import org.elasticsearch.action.get.GetResponse; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.geo.ShapeRelation; +import org.elasticsearch.common.geo.SpatialStrategy; import org.elasticsearch.common.geo.builders.EnvelopeBuilder; import org.elasticsearch.common.geo.builders.ShapeBuilder; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.json.JsonXContent; -import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.index.get.GetResult; +import org.elasticsearch.test.geo.RandomShapeGenerator; +import org.junit.After; import org.junit.Test; -public class GeoShapeQueryBuilderTests extends ESTestCase { +import java.io.IOException; + +import static org.hamcrest.Matchers.anyOf; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.notNullValue; + +public class GeoShapeQueryBuilderTests extends AbstractQueryTestCase { + + private static String indexedShapeId; + private static String indexedShapeType; + private static String indexedShapePath; + private static String indexedShapeIndex; + private static ShapeBuilder indexedShapeToReturn; + + @Override + protected GeoShapeQueryBuilder doCreateTestQueryBuilder() { + ShapeBuilder shape = RandomShapeGenerator.createShapeWithin(getRandom(), null); + GeoShapeQueryBuilder builder; + if (randomBoolean()) { + try { + builder = new GeoShapeQueryBuilder(GEO_SHAPE_FIELD_NAME, shape); + } catch (IOException e) { + throw new RuntimeException(e); + } + } else { + indexedShapeToReturn = shape; + indexedShapeId = randomAsciiOfLengthBetween(3, 20); + indexedShapeType = randomAsciiOfLengthBetween(3, 20); + builder = new 
GeoShapeQueryBuilder(GEO_SHAPE_FIELD_NAME, indexedShapeId, indexedShapeType); + if (randomBoolean()) { + indexedShapeIndex = randomAsciiOfLengthBetween(3, 20); + builder.indexedShapeIndex(indexedShapeIndex); + } + if (randomBoolean()) { + indexedShapePath = randomAsciiOfLengthBetween(3, 20); + builder.indexedShapePath(indexedShapePath); + } + } + SpatialStrategy strategy = randomFrom(SpatialStrategy.values()); + builder.strategy(strategy); + if (strategy != SpatialStrategy.TERM) { + builder.relation(randomFrom(ShapeRelation.values())); + } + return builder; + } + + @Override + protected GetResponse executeGet(GetRequest getRequest) { + assertThat(indexedShapeToReturn, notNullValue()); + assertThat(indexedShapeId, notNullValue()); + assertThat(indexedShapeType, notNullValue()); + assertThat(getRequest.id(), equalTo(indexedShapeId)); + assertThat(getRequest.type(), equalTo(indexedShapeType)); + String expectedShapeIndex = indexedShapeIndex == null ? GeoShapeQueryBuilder.DEFAULT_SHAPE_INDEX_NAME : indexedShapeIndex; + assertThat(getRequest.index(), equalTo(expectedShapeIndex)); + String expectedShapePath = indexedShapePath == null ? 
GeoShapeQueryBuilder.DEFAULT_SHAPE_FIELD_NAME : indexedShapePath; + String json; + try { + XContentBuilder builder = XContentFactory.jsonBuilder().prettyPrint(); + builder.startObject(); + builder.field(expectedShapePath, indexedShapeToReturn); + builder.endObject(); + json = builder.string(); + } catch (IOException ex) { + throw new ElasticsearchException("boom", ex); + } + GetResponse response = new GetResponse(new GetResult(indexedShapeIndex, indexedShapeType, indexedShapeId, 0, true, new BytesArray( + json), null)); + return response; + } + + @After + public void clearShapeFields() { + indexedShapeToReturn = null; + indexedShapeId = null; + indexedShapeType = null; + indexedShapePath = null; + indexedShapeIndex = null; + } + + @Override + protected void doAssertLuceneQuery(GeoShapeQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException { + // Logic for doToQuery is complex and is hard to test here. Need to rely + // on Integration tests to determine if created query is correct + // TODO improve GeoShapeQueryBuilder.doToQuery() method to make it + // easier to test here + assertThat(query, anyOf(instanceOf(BooleanQuery.class), instanceOf(ConstantScoreQuery.class))); + } + + /** + * Overridden here to ensure the test is only run if at least one type is + * present in the mappings. 
Geo queries do not execute if the field is not + * explicitly mapped + */ + @Override + public void testToQuery() throws IOException { + //TODO figure out why this test might take up to 10 seconds once in a while + assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); + super.testToQuery(); + } + + @Test(expected = IllegalArgumentException.class) + public void testNoFieldName() throws Exception { + ShapeBuilder shape = RandomShapeGenerator.createShapeWithin(getRandom(), null); + new GeoShapeQueryBuilder(null, shape); + } + + @Test + public void testNoShape() throws IOException { + try { + GeoShapeQueryBuilder builder = new GeoShapeQueryBuilder(GEO_SHAPE_FIELD_NAME, (ShapeBuilder) null); + fail("exception expected"); + } catch (IllegalArgumentException e) { + // expected + } + } + + @Test(expected = IllegalArgumentException.class) + public void testNoIndexedShape() throws IOException { + new GeoShapeQueryBuilder(GEO_SHAPE_FIELD_NAME, (String) null, "type"); + } + + @Test(expected = IllegalArgumentException.class) + public void testNoIndexedShapeType() throws IOException { + new GeoShapeQueryBuilder(GEO_SHAPE_FIELD_NAME, "id", (String) null); + } + + @Test(expected=IllegalArgumentException.class) + public void testNoRelation() throws IOException { + ShapeBuilder shape = RandomShapeGenerator.createShapeWithin(getRandom(), null); + GeoShapeQueryBuilder builder = new GeoShapeQueryBuilder(GEO_SHAPE_FIELD_NAME, shape); + builder.relation(null); + } + + @Test + public void testInvalidRelation() throws IOException { + ShapeBuilder shape = RandomShapeGenerator.createShapeWithin(getRandom(), null); + GeoShapeQueryBuilder builder = new GeoShapeQueryBuilder(GEO_SHAPE_FIELD_NAME, shape); + try { + builder.strategy(SpatialStrategy.TERM); + builder.relation(randomFrom(ShapeRelation.DISJOINT, ShapeRelation.WITHIN)); + fail("Illegal combination of strategy and relation setting"); + } catch (IllegalArgumentException e) { + // okay + } + + try 
{ + builder.relation(randomFrom(ShapeRelation.DISJOINT, ShapeRelation.WITHIN)); + builder.strategy(SpatialStrategy.TERM); + fail("Illegal combination of strategy and relation setting"); + } catch (IllegalArgumentException e) { + // okay + } + } @Test // see #3878 public void testThatXContentSerializationInsideOfArrayWorks() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/index/query/GeohashCellQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/GeohashCellQueryBuilderTests.java new file mode 100644 index 00000000000..0db757f4a5f --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/query/GeohashCellQueryBuilderTests.java @@ -0,0 +1,108 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.query; + +import org.apache.lucene.index.Term; +import org.apache.lucene.queries.TermsQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.TermQuery; +import org.elasticsearch.common.geo.GeoPoint; +import org.elasticsearch.common.unit.DistanceUnit; +import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper; +import org.elasticsearch.index.query.GeohashCellQuery.Builder; +import org.junit.Test; + +import java.io.IOException; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; + +public class GeohashCellQueryBuilderTests extends AbstractQueryTestCase { + + @Override + protected Builder doCreateTestQueryBuilder() { + GeohashCellQuery.Builder builder = new Builder(GEO_POINT_FIELD_NAME, randomGeohash(1, 12)); + if (randomBoolean()) { + builder.neighbors(randomBoolean()); + } + if (randomBoolean()) { + if (randomBoolean()) { + builder.precision(randomIntBetween(1, 12)); + } else { + builder.precision(randomIntBetween(1, 1000000) + randomFrom(DistanceUnit.values()).toString()); + } + } + return builder; + } + + @Override + protected void doAssertLuceneQuery(Builder queryBuilder, Query query, QueryShardContext context) throws IOException { + if (queryBuilder.neighbors()) { + assertThat(query, instanceOf(TermsQuery.class)); + } else { + assertThat(query, instanceOf(TermQuery.class)); + TermQuery termQuery = (TermQuery) query; + Term term = termQuery.getTerm(); + assertThat(term.field(), equalTo(queryBuilder.fieldName() + GeoPointFieldMapper.Names.GEOHASH_SUFFIX)); + String geohash = queryBuilder.geohash(); + if (queryBuilder.precision() != null) { + int len = Math.min(queryBuilder.precision(), geohash.length()); + geohash = geohash.substring(0, len); + } + assertThat(term.text(), equalTo(geohash)); + } + } + + /** + * Overridden here to ensure the test is only run if at least one type is + * present in the mappings. 
Geo queries do not execute if the field is not + * explicitly mapped + */ + @Override + public void testToQuery() throws IOException { + assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); + super.testToQuery(); + } + + @Test(expected=IllegalArgumentException.class) + public void testNullField() { + if (randomBoolean()) { + new Builder(null, new GeoPoint()); + } else { + new Builder("", new GeoPoint()); + } + } + + @Test(expected=IllegalArgumentException.class) + public void testNullGeoPoint() { + if (randomBoolean()) { + new Builder(GEO_POINT_FIELD_NAME, (GeoPoint) null); + } else { + new Builder(GEO_POINT_FIELD_NAME, ""); + } + } + + @Test(expected=IllegalArgumentException.class) + public void testInvalidPrecision() { + GeohashCellQuery.Builder builder = new Builder(GEO_POINT_FIELD_NAME, new GeoPoint()); + builder.precision(-1); + } + +} diff --git a/core/src/test/java/org/elasticsearch/index/query/HasChildQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/HasChildQueryBuilderTests.java new file mode 100644 index 00000000000..a307cf180c5 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/query/HasChildQueryBuilderTests.java @@ -0,0 +1,202 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.query; + +import com.carrotsearch.randomizedtesting.generators.RandomPicks; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.join.ScoreMode; +import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; +import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.index.fielddata.IndexFieldDataService; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.query.support.QueryInnerHits; +import org.elasticsearch.search.fetch.innerhits.InnerHitsBuilder; +import org.elasticsearch.search.fetch.innerhits.InnerHitsContext; +import org.elasticsearch.search.internal.SearchContext; +import org.elasticsearch.search.sort.SortOrder; +import org.elasticsearch.test.TestSearchContext; + +import java.io.IOException; + +import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath; +import static org.hamcrest.CoreMatchers.instanceOf; + +public class HasChildQueryBuilderTests extends AbstractQueryTestCase { + protected static final String PARENT_TYPE = "parent"; + protected static final String CHILD_TYPE = "child"; + + public void setUp() throws Exception { + super.setUp(); + MapperService mapperService = queryParserService().mapperService; + mapperService.merge(PARENT_TYPE, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(PARENT_TYPE, + STRING_FIELD_NAME, "type=string", + INT_FIELD_NAME, "type=integer", + DOUBLE_FIELD_NAME, "type=double", + BOOLEAN_FIELD_NAME, "type=boolean", + DATE_FIELD_NAME, "type=date", + OBJECT_FIELD_NAME, "type=object" + ).string()), false, false); + mapperService.merge(CHILD_TYPE, new 
CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(CHILD_TYPE, + "_parent", "type=" + PARENT_TYPE, + STRING_FIELD_NAME, "type=string", + INT_FIELD_NAME, "type=integer", + DOUBLE_FIELD_NAME, "type=double", + BOOLEAN_FIELD_NAME, "type=boolean", + DATE_FIELD_NAME, "type=date", + OBJECT_FIELD_NAME, "type=object" + ).string()), false, false); + } + + protected void setSearchContext(String[] types) { + final MapperService mapperService = queryParserService().mapperService; + final IndexFieldDataService fieldData = queryParserService().fieldDataService; + TestSearchContext testSearchContext = new TestSearchContext() { + private InnerHitsContext context; + + + @Override + public void innerHits(InnerHitsContext innerHitsContext) { + context = innerHitsContext; + } + + @Override + public InnerHitsContext innerHits() { + return context; + } + + @Override + public MapperService mapperService() { + return mapperService; // need to build / parse inner hits sort fields + } + + @Override + public IndexFieldDataService fieldData() { + return fieldData; // need to build / parse inner hits sort fields + } + }; + testSearchContext.setTypes(types); + SearchContext.setCurrent(testSearchContext); + } + + /** + * @return a {@link HasChildQueryBuilder} with random values all over the place + */ + @Override + protected HasChildQueryBuilder doCreateTestQueryBuilder() { + int min = randomIntBetween(0, Integer.MAX_VALUE / 2); + int max = randomIntBetween(min, Integer.MAX_VALUE); + InnerHitsBuilder.InnerHit innerHit = new InnerHitsBuilder.InnerHit().setSize(100).addSort(STRING_FIELD_NAME, SortOrder.ASC); + return new HasChildQueryBuilder(CHILD_TYPE, + RandomQueryBuilder.createQuery(random()), max, min, + RandomPicks.randomFrom(random(), ScoreMode.values()), + randomBoolean() ? 
null : new QueryInnerHits("inner_hits_name", innerHit)); + } + + @Override + protected void doAssertLuceneQuery(HasChildQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException { + QueryBuilder innerQueryBuilder = queryBuilder.query(); + if (innerQueryBuilder instanceof EmptyQueryBuilder) { + assertNull(query); + } else { + assertThat(query, instanceOf(HasChildQueryBuilder.LateParsingQuery.class)); + HasChildQueryBuilder.LateParsingQuery lpq = (HasChildQueryBuilder.LateParsingQuery) query; + assertEquals(queryBuilder.minChildren(), lpq.getMinChildren()); + assertEquals(queryBuilder.maxChildren(), lpq.getMaxChildren()); + assertEquals(queryBuilder.scoreMode(), lpq.getScoreMode()); // WTF is this why do we have two? + } + if (queryBuilder.innerHit() != null) { + assertNotNull(SearchContext.current()); + if (query != null) { + assertNotNull(SearchContext.current().innerHits()); + assertEquals(1, SearchContext.current().innerHits().getInnerHits().size()); + assertTrue(SearchContext.current().innerHits().getInnerHits().containsKey("inner_hits_name")); + InnerHitsContext.BaseInnerHits innerHits = SearchContext.current().innerHits().getInnerHits().get("inner_hits_name"); + assertEquals(innerHits.size(), 100); + assertEquals(innerHits.sort().getSort().length, 1); + assertEquals(innerHits.sort().getSort()[0].getField(), STRING_FIELD_NAME); + } else { + assertNull(SearchContext.current().innerHits()); + } + } + } + + public void testIllegalValues() { + QueryBuilder query = RandomQueryBuilder.createQuery(random()); + try { + new HasChildQueryBuilder(null, query); + fail("must not be null"); + } catch (IllegalArgumentException ex) { + + } + + try { + new HasChildQueryBuilder("foo", null); + fail("must not be null"); + } catch (IllegalArgumentException ex) { + + } + HasChildQueryBuilder foo = new HasChildQueryBuilder("foo", query);// all good + try { + foo.scoreMode(null); + fail("must not be null"); + } catch (IllegalArgumentException ex) { + + } + 
final int positiveValue = randomIntBetween(0, Integer.MAX_VALUE); + try { + foo.minChildren(randomIntBetween(Integer.MIN_VALUE, -1)); + fail("must not be negative"); + } catch (IllegalArgumentException ex) { + + } + foo.minChildren(positiveValue); + assertEquals(positiveValue, foo.minChildren()); + try { + foo.maxChildren(randomIntBetween(Integer.MIN_VALUE, -1)); + fail("must not be negative"); + } catch (IllegalArgumentException ex) { + + } + + foo.maxChildren(positiveValue); + assertEquals(positiveValue, foo.maxChildren()); + } + + public void testParseFromJSON() throws IOException { + String query = copyToStringFromClasspath("/org/elasticsearch/index/query/has-child-with-inner-hits.json").trim(); + HasChildQueryBuilder queryBuilder = (HasChildQueryBuilder) parseQuery(query); + assertEquals(query, queryBuilder.maxChildren(), 1217235442); + assertEquals(query, queryBuilder.minChildren(), 883170873); + assertEquals(query, queryBuilder.boost(), 2.0f, 0.0f); + assertEquals(query, queryBuilder.queryName(), "WNzYMJKRwePuRBh"); + assertEquals(query, queryBuilder.childType(), "child"); + assertEquals(query, queryBuilder.scoreMode(), ScoreMode.Avg); + assertNotNull(query, queryBuilder.innerHit()); + assertEquals(query, queryBuilder.innerHit(), new QueryInnerHits("inner_hits_name", new InnerHitsBuilder.InnerHit().setSize(100).addSort("mapped_string", SortOrder.ASC))); + // now assert that we actually generate the same JSON + XContentBuilder builder = XContentFactory.jsonBuilder().prettyPrint(); + queryBuilder.toXContent(builder, ToXContent.EMPTY_PARAMS); + assertEquals(query, builder.string()); + } + +} diff --git a/core/src/test/java/org/elasticsearch/index/query/HasParentQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/HasParentQueryBuilderTests.java new file mode 100644 index 00000000000..9366c08cbab --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/query/HasParentQueryBuilderTests.java @@ -0,0 +1,194 @@ +/* + * Licensed to 
Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.query; + +import com.carrotsearch.randomizedtesting.generators.RandomPicks; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.join.ScoreMode; +import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; +import org.elasticsearch.common.ParseFieldMatcher; +import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.xcontent.*; +import org.elasticsearch.index.fielddata.IndexFieldDataService; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.query.support.QueryInnerHits; +import org.elasticsearch.search.fetch.innerhits.InnerHitsBuilder; +import org.elasticsearch.search.fetch.innerhits.InnerHitsContext; +import org.elasticsearch.search.internal.SearchContext; +import org.elasticsearch.search.sort.SortOrder; +import org.elasticsearch.test.TestSearchContext; + +import java.io.IOException; +import java.util.Arrays; + +import static org.hamcrest.CoreMatchers.instanceOf; + +public class HasParentQueryBuilderTests extends AbstractQueryTestCase { + protected static final String PARENT_TYPE = "parent"; + protected static final String CHILD_TYPE = "child"; + + public 
void setUp() throws Exception { + super.setUp(); + MapperService mapperService = queryParserService().mapperService; + mapperService.merge(PARENT_TYPE, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(PARENT_TYPE, + STRING_FIELD_NAME, "type=string", + INT_FIELD_NAME, "type=integer", + DOUBLE_FIELD_NAME, "type=double", + BOOLEAN_FIELD_NAME, "type=boolean", + DATE_FIELD_NAME, "type=date", + OBJECT_FIELD_NAME, "type=object" + ).string()), false, false); + mapperService.merge(CHILD_TYPE, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(CHILD_TYPE, + "_parent", "type=" + PARENT_TYPE, + STRING_FIELD_NAME, "type=string", + INT_FIELD_NAME, "type=integer", + DOUBLE_FIELD_NAME, "type=double", + BOOLEAN_FIELD_NAME, "type=boolean", + DATE_FIELD_NAME, "type=date", + OBJECT_FIELD_NAME, "type=object" + ).string()), false, false); + } + + protected void setSearchContext(String[] types) { + final MapperService mapperService = queryParserService().mapperService; + final IndexFieldDataService fieldData = queryParserService().fieldDataService; + TestSearchContext testSearchContext = new TestSearchContext() { + private InnerHitsContext context; + + + @Override + public void innerHits(InnerHitsContext innerHitsContext) { + context = innerHitsContext; + } + + @Override + public InnerHitsContext innerHits() { + return context; + } + + @Override + public MapperService mapperService() { + return mapperService; // need to build / parse inner hits sort fields + } + + @Override + public IndexFieldDataService fieldData() { + return fieldData; // need to build / parse inner hits sort fields + } + }; + testSearchContext.setTypes(types); + SearchContext.setCurrent(testSearchContext); + } + + /** + * @return a {@link HasChildQueryBuilder} with random values all over the place + */ + @Override + protected HasParentQueryBuilder doCreateTestQueryBuilder() { + InnerHitsBuilder.InnerHit innerHit = new InnerHitsBuilder.InnerHit().setSize(100).addSort(STRING_FIELD_NAME, 
SortOrder.ASC); + return new HasParentQueryBuilder(PARENT_TYPE, + RandomQueryBuilder.createQuery(random()),randomBoolean(), + randomBoolean() ? null : new QueryInnerHits("inner_hits_name", innerHit)); + } + + @Override + protected void doAssertLuceneQuery(HasParentQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException { + QueryBuilder innerQueryBuilder = queryBuilder.query(); + if (innerQueryBuilder instanceof EmptyQueryBuilder) { + assertNull(query); + } else { + assertThat(query, instanceOf(HasChildQueryBuilder.LateParsingQuery.class)); + HasChildQueryBuilder.LateParsingQuery lpq = (HasChildQueryBuilder.LateParsingQuery) query; + assertEquals(queryBuilder.score() ? ScoreMode.Max : ScoreMode.None, lpq.getScoreMode()); + } + if (queryBuilder.innerHit() != null) { + assertNotNull(SearchContext.current()); + if (query != null) { + assertNotNull(SearchContext.current().innerHits()); + assertEquals(1, SearchContext.current().innerHits().getInnerHits().size()); + assertTrue(SearchContext.current().innerHits().getInnerHits().containsKey("inner_hits_name")); + InnerHitsContext.BaseInnerHits innerHits = SearchContext.current().innerHits().getInnerHits().get("inner_hits_name"); + assertEquals(innerHits.size(), 100); + assertEquals(innerHits.sort().getSort().length, 1); + assertEquals(innerHits.sort().getSort()[0].getField(), STRING_FIELD_NAME); + } else { + assertNull(SearchContext.current().innerHits()); + } + } + } + + public void testIllegalValues() { + QueryBuilder query = RandomQueryBuilder.createQuery(random()); + try { + new HasParentQueryBuilder(null, query); + fail("must not be null"); + } catch (IllegalArgumentException ex) { + + } + + try { + new HasParentQueryBuilder("foo", null); + fail("must not be null"); + } catch (IllegalArgumentException ex) { + + } + } + + public void testDeprecatedXContent() throws IOException { + XContentBuilder builder = XContentFactory.jsonBuilder().prettyPrint(); + builder.startObject(); + 
builder.startObject("has_parent"); + builder.field("query"); + EmptyQueryBuilder.PROTOTYPE.toXContent(builder, ToXContent.EMPTY_PARAMS); + builder.field("type", "foo"); // deprecated + builder.endObject(); + builder.endObject(); + try { + parseQuery(builder.string()); + fail("type is deprecated"); + } catch (IllegalArgumentException ex) { + assertEquals("Deprecated field [type] used, expected [parent_type] instead", ex.getMessage()); + } + + HasParentQueryBuilder queryBuilder = (HasParentQueryBuilder) parseQuery(builder.string(), ParseFieldMatcher.EMPTY); + assertEquals("foo", queryBuilder.type()); + + boolean score = randomBoolean(); + String key = RandomPicks.randomFrom(random(), Arrays.asList("score_mode", "scoreMode")); + builder = XContentFactory.jsonBuilder().prettyPrint(); + builder.startObject(); + builder.startObject("has_parent"); + builder.field("query"); + EmptyQueryBuilder.PROTOTYPE.toXContent(builder, ToXContent.EMPTY_PARAMS); + builder.field(key, score ? "score": "none"); + builder.field("parent_type", "foo"); + builder.endObject(); + builder.endObject(); + try { + parseQuery(builder.string()); + fail(key + " is deprecated"); + } catch (IllegalArgumentException ex) { + assertEquals("Deprecated field [" + key + "] used, replaced by [score]", ex.getMessage()); + } + + queryBuilder = (HasParentQueryBuilder) parseQuery(builder.string(), ParseFieldMatcher.EMPTY); + assertEquals(score, queryBuilder.score()); + } +} diff --git a/core/src/test/java/org/elasticsearch/index/query/IdsQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/IdsQueryBuilderTests.java new file mode 100644 index 00000000000..7ec3e3f8d25 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/query/IdsQueryBuilderTests.java @@ -0,0 +1,124 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.query; + + +import org.apache.lucene.queries.TermsQuery; +import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.Query; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.common.ParsingException; +import org.junit.Test; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.instanceOf; + +public class IdsQueryBuilderTests extends AbstractQueryTestCase { + + /** + * check that parser throws exception on missing values field + */ + @Test(expected=ParsingException.class) + public void testIdsNotProvided() throws IOException { + String noIdsFieldQuery = "{\"ids\" : { \"type\" : \"my_type\" }"; + parseQuery(noIdsFieldQuery); + } + + @Override + protected IdsQueryBuilder doCreateTestQueryBuilder() { + String[] types; + if (getCurrentTypes().length > 0 && randomBoolean()) { + int numberOfTypes = randomIntBetween(1, getCurrentTypes().length); + types = new String[numberOfTypes]; + for (int i = 0; i < numberOfTypes; i++) { + if (frequently()) { + types[i] = randomFrom(getCurrentTypes()); + } else { + types[i] = randomAsciiOfLengthBetween(1, 10); + } + } + } else { + if (randomBoolean()) { + types = new String[]{MetaData.ALL}; + } else { + types = new String[0]; + } + } + int 
numberOfIds = randomIntBetween(0, 10); + String[] ids = new String[numberOfIds]; + for (int i = 0; i < numberOfIds; i++) { + ids[i] = randomAsciiOfLengthBetween(1, 10); + } + IdsQueryBuilder query; + if (types.length > 0 || randomBoolean()) { + query = new IdsQueryBuilder(types); + query.addIds(ids); + } else { + query = new IdsQueryBuilder(); + query.addIds(ids); + } + return query; + } + + @Override + protected void doAssertLuceneQuery(IdsQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException { + if (queryBuilder.ids().size() == 0) { + assertThat(query, instanceOf(BooleanQuery.class)); + assertThat(((BooleanQuery)query).clauses().size(), equalTo(0)); + } else { + assertThat(query, instanceOf(TermsQuery.class)); + } + } + + @Override + protected Map getAlternateVersions() { + Map alternateVersions = new HashMap<>(); + + IdsQueryBuilder tempQuery = createTestQueryBuilder(); + if (tempQuery.types() != null && tempQuery.types().length > 0) { + String type = tempQuery.types()[0]; + IdsQueryBuilder testQuery = new IdsQueryBuilder(type); + + //single value type can also be called _type + String contentString1 = "{\n" + + " \"ids\" : {\n" + + " \"_type\" : \"" + type + "\",\n" + + " \"values\" : []\n" + + " }\n" + + "}"; + alternateVersions.put(contentString1, testQuery); + + //array of types can also be called type rather than types + String contentString2 = "{\n" + + " \"ids\" : {\n" + + " \"type\" : [\"" + type + "\"],\n" + + " \"values\" : []\n" + + " }\n" + + "}"; + alternateVersions.put(contentString2, testQuery); + } + + return alternateVersions; + } +} diff --git a/core/src/test/java/org/elasticsearch/index/query/IndexQueryParserFilterDateRangeFormatTests.java b/core/src/test/java/org/elasticsearch/index/query/IndexQueryParserFilterDateRangeFormatTests.java deleted file mode 100644 index 8b4c40585c4..00000000000 --- a/core/src/test/java/org/elasticsearch/index/query/IndexQueryParserFilterDateRangeFormatTests.java +++ /dev/null @@ 
-1,172 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.index.query; - - -import org.apache.lucene.search.NumericRangeQuery; -import org.apache.lucene.search.Query; -import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.inject.Injector; -import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.mapper.ParsedDocument; -import org.elasticsearch.search.internal.SearchContext; -import org.elasticsearch.test.ESSingleNodeTestCase; -import org.elasticsearch.test.TestSearchContext; -import org.joda.time.DateTime; -import org.junit.Before; -import org.junit.Test; - -import java.io.IOException; - -import static org.elasticsearch.test.StreamsUtils.copyToBytesFromClasspath; -import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath; -import static org.hamcrest.Matchers.instanceOf; -import static org.hamcrest.Matchers.is; - -/** - * - */ -public class IndexQueryParserFilterDateRangeFormatTests extends ESSingleNodeTestCase { - - private Injector injector; - private 
IndexQueryParserService queryParser; - - @Before - public void setup() throws IOException { - IndexService indexService = createIndex("test"); - injector = indexService.injector(); - - MapperService mapperService = indexService.mapperService(); - String mapping = copyToStringFromClasspath("/org/elasticsearch/index/query/mapping.json"); - mapperService.merge("person", new CompressedXContent(mapping), true, false); - ParsedDocument doc = mapperService.documentMapper("person").parse("test", "person", "1", new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/query/data.json"))); - assertNotNull(doc.dynamicMappingsUpdate()); - client().admin().indices().preparePutMapping("test").setType("person").setSource(doc.dynamicMappingsUpdate().toString()).get(); - queryParser = injector.getInstance(IndexQueryParserService.class); - } - - private IndexQueryParserService queryParser() throws IOException { - return this.queryParser; - } - - @Test - public void testDateRangeFilterFormat() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/date_range_filter_format.json"); - queryParser.parse(query).query(); - // Sadly from NoCacheFilter, we can not access to the delegate filter so we can not check - // it's the one we are expecting - - // Test Invalid format - query = copyToStringFromClasspath("/org/elasticsearch/index/query/date_range_filter_format_invalid.json"); - try { - SearchContext.setCurrent(new TestSearchContext()); - // We need to rewrite, because range on date field initially returns LateParsingQuery - queryParser.parse(query).query().rewrite(null); - fail("A Range Filter with a specific format but with an unexpected date should raise a QueryParsingException"); - } catch (ElasticsearchParseException e) { - // We expect it - } finally { - SearchContext.removeCurrent(); - } - } - - @Test - public void testDateRangeQueryFormat() throws IOException { - 
IndexQueryParserService queryParser = queryParser(); - // We test 01/01/2012 from gte and 2030 for lt - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/date_range_query_format.json"); - Query parsedQuery; - try { - SearchContext.setCurrent(new TestSearchContext()); - // We need to rewrite, because range on date field initially returns LateParsingQuery - parsedQuery = queryParser.parse(query).query().rewrite(null); - } finally { - SearchContext.removeCurrent();; - } - assertThat(parsedQuery, instanceOf(NumericRangeQuery.class)); - - // Min value was 01/01/2012 (dd/MM/yyyy) - DateTime min = DateTime.parse("2012-01-01T00:00:00.000+00"); - assertThat(((NumericRangeQuery) parsedQuery).getMin().longValue(), is(min.getMillis())); - - // Max value was 2030 (yyyy) - DateTime max = DateTime.parse("2030-01-01T00:00:00.000+00"); - assertThat(((NumericRangeQuery) parsedQuery).getMax().longValue(), is(max.getMillis())); - - // Test Invalid format - query = copyToStringFromClasspath("/org/elasticsearch/index/query/date_range_query_format_invalid.json"); - try { - SearchContext.setCurrent(new TestSearchContext()); - queryParser.parse(query).query().rewrite(null); - fail("A Range Query with a specific format but with an unexpected date should raise a QueryParsingException"); - } catch (ElasticsearchParseException e) { - // We expect it - } finally { - SearchContext.removeCurrent(); - } - } - - @Test - public void testDateRangeBoundaries() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/date_range_query_boundaries_inclusive.json"); - Query parsedQuery; - try { - SearchContext.setCurrent(new TestSearchContext()); - // We need to rewrite, because range on date field initially returns LateParsingQuery - parsedQuery = queryParser.parse(query).query().rewrite(null); - } finally { - SearchContext.removeCurrent(); - } - assertThat(parsedQuery, 
instanceOf(NumericRangeQuery.class)); - NumericRangeQuery rangeQuery = (NumericRangeQuery) parsedQuery; - - DateTime min = DateTime.parse("2014-11-01T00:00:00.000+00"); - assertThat(rangeQuery.getMin().longValue(), is(min.getMillis())); - assertTrue(rangeQuery.includesMin()); - - DateTime max = DateTime.parse("2014-12-08T23:59:59.999+00"); - assertThat(rangeQuery.getMax().longValue(), is(max.getMillis())); - assertTrue(rangeQuery.includesMax()); - - query = copyToStringFromClasspath("/org/elasticsearch/index/query/date_range_query_boundaries_exclusive.json"); - try { - SearchContext.setCurrent(new TestSearchContext()); - // We need to rewrite, because range on date field initially returns LateParsingQuery - parsedQuery = queryParser.parse(query).query().rewrite(null); - } finally { - SearchContext.removeCurrent(); - } - assertThat(parsedQuery, instanceOf(NumericRangeQuery.class)); - rangeQuery = (NumericRangeQuery) parsedQuery; - - min = DateTime.parse("2014-11-30T23:59:59.999+00"); - assertThat(rangeQuery.getMin().longValue(), is(min.getMillis())); - assertFalse(rangeQuery.includesMin()); - - max = DateTime.parse("2014-12-08T00:00:00.000+00"); - assertThat(rangeQuery.getMax().longValue(), is(max.getMillis())); - assertFalse(rangeQuery.includesMax()); - } -} diff --git a/core/src/test/java/org/elasticsearch/index/query/IndexQueryParserFilterDateRangeTimezoneTests.java b/core/src/test/java/org/elasticsearch/index/query/IndexQueryParserFilterDateRangeTimezoneTests.java deleted file mode 100644 index 8bf70de8c1e..00000000000 --- a/core/src/test/java/org/elasticsearch/index/query/IndexQueryParserFilterDateRangeTimezoneTests.java +++ /dev/null @@ -1,130 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. 
Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.index.query; - - -import org.apache.lucene.search.NumericRangeQuery; -import org.apache.lucene.search.Query; -import org.elasticsearch.common.ParsingException; -import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.inject.Injector; -import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.mapper.ParsedDocument; -import org.elasticsearch.search.internal.SearchContext; -import org.elasticsearch.test.ESSingleNodeTestCase; -import org.elasticsearch.test.TestSearchContext; -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; -import org.junit.Before; -import org.junit.Test; - -import java.io.IOException; - -import static org.elasticsearch.test.StreamsUtils.copyToBytesFromClasspath; -import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath; -import static org.hamcrest.Matchers.instanceOf; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.lessThanOrEqualTo; - -/** - * - */ -public class IndexQueryParserFilterDateRangeTimezoneTests extends ESSingleNodeTestCase { - - private Injector injector; - private IndexQueryParserService queryParser; - - @Before - public void setup() throws IOException { - IndexService indexService = 
createIndex("test"); - injector = indexService.injector(); - - MapperService mapperService = indexService.mapperService(); - String mapping = copyToStringFromClasspath("/org/elasticsearch/index/query/mapping.json"); - mapperService.merge("person", new CompressedXContent(mapping), true, false); - ParsedDocument doc = mapperService.documentMapper("person").parse("test", "person", "1", new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/query/data.json"))); - assertNotNull(doc.dynamicMappingsUpdate()); - client().admin().indices().preparePutMapping("test").setType("person").setSource(doc.dynamicMappingsUpdate().toString()).get(); - queryParser = injector.getInstance(IndexQueryParserService.class); - } - - private IndexQueryParserService queryParser() throws IOException { - return this.queryParser; - } - - @Test - public void testDateRangeFilterTimezone() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/date_range_filter_timezone.json"); - queryParser.parse(query).query(); - // Sadly from NoCacheFilter, we can not access to the delegate filter so we can not check - // it's the one we are expecting - - query = copyToStringFromClasspath("/org/elasticsearch/index/query/date_range_filter_timezone_numeric_field.json"); - try { - SearchContext.setCurrent(new TestSearchContext()); - queryParser.parse(query).query(); - fail("A Range Filter on a numeric field with a TimeZone should raise a QueryParsingException"); - } catch (ParsingException e) { - // We expect it - } finally { - SearchContext.removeCurrent(); - } - } - - @Test - public void testDateRangeQueryTimezone() throws IOException { - long startDate = System.currentTimeMillis(); - - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/date_range_query_timezone.json"); - Query parsedQuery; - try { - SearchContext.setCurrent(new 
TestSearchContext()); - parsedQuery = queryParser.parse(query).query().rewrite(null); - } finally { - SearchContext.removeCurrent(); - } - assertThat(parsedQuery, instanceOf(NumericRangeQuery.class)); - - // Min value was 2012-01-01 (UTC) so we need to remove one hour - DateTime min = DateTime.parse("2012-01-01T00:00:00.000+01:00"); - // Max value is when we started the test. So it should be some ms from now - DateTime max = new DateTime(startDate, DateTimeZone.UTC); - - assertThat(((NumericRangeQuery) parsedQuery).getMin().longValue(), is(min.getMillis())); - - // We should not have a big difference here (should be some ms) - assertThat(((NumericRangeQuery) parsedQuery).getMax().longValue() - max.getMillis(), lessThanOrEqualTo(60000L)); - - query = copyToStringFromClasspath("/org/elasticsearch/index/query/date_range_query_timezone_numeric_field.json"); - try { - SearchContext.setCurrent(new TestSearchContext()); - queryParser.parse(query).query(); - fail("A Range Query on a numeric field with a TimeZone should raise a QueryParsingException"); - } catch (ParsingException e) { - // We expect it - } finally { - SearchContext.removeCurrent(); - } - } -} diff --git a/core/src/test/java/org/elasticsearch/index/query/IndicesQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/IndicesQueryBuilderTests.java new file mode 100644 index 00000000000..8db4317a589 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/query/IndicesQueryBuilderTests.java @@ -0,0 +1,109 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.query; + +import org.apache.lucene.search.Query; +import org.junit.Test; + +import java.io.IOException; + +public class IndicesQueryBuilderTests extends AbstractQueryTestCase { + + @Override + protected IndicesQueryBuilder doCreateTestQueryBuilder() { + String[] indices; + if (randomBoolean()) { + indices = new String[]{getIndex().getName()}; + } else { + indices = generateRandomStringArray(5, 10, false, false); + } + IndicesQueryBuilder query = new IndicesQueryBuilder(RandomQueryBuilder.createQuery(random()), indices); + + switch (randomInt(2)) { + case 0: + query.noMatchQuery(RandomQueryBuilder.createQuery(random())); + break; + case 1: + query.noMatchQuery(randomFrom(QueryBuilders.matchAllQuery(), new MatchNoneQueryBuilder())); + break; + default: + // do not set noMatchQuery + } + return query; + } + + @Override + protected void doAssertLuceneQuery(IndicesQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException { + Query expected; + if (queryBuilder.indices().length == 1 && getIndex().getName().equals(queryBuilder.indices()[0])) { + expected = queryBuilder.innerQuery().toQuery(context); + } else { + expected = queryBuilder.noMatchQuery().toQuery(context); + } + if (expected != null && queryBuilder.boost() != AbstractQueryBuilder.DEFAULT_BOOST) { + expected.setBoost(queryBuilder.boost()); + } + assertEquals(query, expected); + } + + @Override + protected void assertBoost(IndicesQueryBuilder queryBuilder, Query query) throws IOException { + //nothing to do here, boost check 
is already included in equality check done as part of doAssertLuceneQuery above + } + + @Test + public void testIllegalArguments() { + try { + new IndicesQueryBuilder(null, "index"); + fail("cannot be null"); + } catch (IllegalArgumentException e) { + // expected + } + + try { + new IndicesQueryBuilder(EmptyQueryBuilder.PROTOTYPE, (String[]) null); + fail("cannot be null"); + } catch (IllegalArgumentException e) { + // expected + } + + try { + new IndicesQueryBuilder(EmptyQueryBuilder.PROTOTYPE, new String[0]); + fail("cannot be empty"); + } catch (IllegalArgumentException e) { + // expected + } + + IndicesQueryBuilder indicesQueryBuilder = new IndicesQueryBuilder(EmptyQueryBuilder.PROTOTYPE, "index"); + try { + indicesQueryBuilder.noMatchQuery((QueryBuilder) null); + fail("cannot be null"); + } catch (IllegalArgumentException e) { + // expected + } + + try { + indicesQueryBuilder.noMatchQuery((String) null); + fail("cannot be null"); + } catch (IllegalArgumentException e) { + // expected + } + } +} diff --git a/core/src/test/java/org/elasticsearch/index/query/MatchAllQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/MatchAllQueryBuilderTests.java new file mode 100644 index 00000000000..7ddc6e8d74c --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/query/MatchAllQueryBuilderTests.java @@ -0,0 +1,52 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.query; + +import org.apache.lucene.search.MatchAllDocsQuery; +import org.apache.lucene.search.Query; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.CoreMatchers.instanceOf; + +public class MatchAllQueryBuilderTests extends AbstractQueryTestCase { + + @Override + protected MatchAllQueryBuilder doCreateTestQueryBuilder() { + return new MatchAllQueryBuilder(); + } + + @Override + protected Map getAlternateVersions() { + Map alternateVersions = new HashMap<>(); + String queryAsString = "{\n" + + " \"match_all\": []\n" + + "}"; + alternateVersions.put(queryAsString, new MatchAllQueryBuilder()); + return alternateVersions; + } + + @Override + protected void doAssertLuceneQuery(MatchAllQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException { + assertThat(query, instanceOf(MatchAllDocsQuery.class)); + } +} \ No newline at end of file diff --git a/core/src/main/java/org/elasticsearch/index/query/support/QueryInnerHitBuilder.java b/core/src/test/java/org/elasticsearch/index/query/MatchNoneQueryBuilderTests.java similarity index 50% rename from core/src/main/java/org/elasticsearch/index/query/support/QueryInnerHitBuilder.java rename to core/src/test/java/org/elasticsearch/index/query/MatchNoneQueryBuilderTests.java index 71229abe975..cb80f31599a 100644 --- a/core/src/main/java/org/elasticsearch/index/query/support/QueryInnerHitBuilder.java +++ 
b/core/src/test/java/org/elasticsearch/index/query/MatchNoneQueryBuilderTests.java @@ -17,35 +17,32 @@ * under the License. */ -package org.elasticsearch.index.query.support; +package org.elasticsearch.index.query; -import org.elasticsearch.common.xcontent.XContentBuilder; +import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.Query; import java.io.IOException; -/** - */ -public class QueryInnerHitBuilder extends BaseInnerHitBuilder { +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.instanceOf; - private String name; +public class MatchNoneQueryBuilderTests extends AbstractQueryTestCase { - /** - * Set the key name to be used in the response. - * - * Defaults to the path if used in nested query, child type if used in has_child query and parent type if used in has_parent. - */ - public QueryInnerHitBuilder setName(String name) { - this.name = name; - return this; + @Override + protected boolean supportsBoostAndQueryName() { + return false; } @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - super.toXContent(builder, params); - if (name != null) { - builder.field("name", name); - } - return builder; + protected AbstractQueryBuilder doCreateTestQueryBuilder() { + return new MatchNoneQueryBuilder(); } + @Override + protected void doAssertLuceneQuery(AbstractQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException { + assertThat(query, instanceOf(BooleanQuery.class)); + BooleanQuery booleanQuery = (BooleanQuery) query; + assertThat(booleanQuery.clauses().size(), equalTo(0)); + } } diff --git a/core/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java new file mode 100644 index 00000000000..f9da80d97d6 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java @@ -0,0 +1,237 @@ +/* + * 
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.query;

import org.apache.lucene.queries.ExtendedCommonTermsQuery;
import org.apache.lucene.search.*;
import org.elasticsearch.common.lucene.search.MultiPhrasePrefixQuery;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.search.MatchQuery;
import org.elasticsearch.index.search.MatchQuery.ZeroTermsQuery;
import org.junit.Test;

import java.io.IOException;
import java.util.Locale;

import static org.hamcrest.CoreMatchers.either;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.notNullValue;

/**
 * Randomized round-trip and {@code toQuery} tests for {@link MatchQueryBuilder}.
 */
public class MatchQueryBuilderTests extends AbstractQueryTestCase<MatchQueryBuilder> {

    @Override
    protected MatchQueryBuilder doCreateTestQueryBuilder() {
        String fieldName = randomFrom(STRING_FIELD_NAME, BOOLEAN_FIELD_NAME, INT_FIELD_NAME,
                DOUBLE_FIELD_NAME, DATE_FIELD_NAME);
        if (fieldName.equals(DATE_FIELD_NAME)) {
            assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
        }
        Object value;
        if (fieldName.equals(STRING_FIELD_NAME)) {
            // between 0 and 3 whitespace-separated random terms
            int terms = randomIntBetween(0, 3);
            StringBuilder builder = new StringBuilder();
            for (int i = 0; i < terms; i++) {
                builder.append(randomAsciiOfLengthBetween(1, 10)).append(' ');
            }
            value = builder.toString().trim();
        } else {
            value = getRandomValueForFieldName(fieldName);
        }

        MatchQueryBuilder matchQuery = new MatchQueryBuilder(fieldName, value);
        matchQuery.type(randomFrom(MatchQuery.Type.values()));
        matchQuery.operator(randomFrom(Operator.values()));

        if (randomBoolean()) {
            matchQuery.analyzer(randomFrom("simple", "keyword", "whitespace"));
        }

        if (randomBoolean()) {
            matchQuery.slop(randomIntBetween(0, 10));
        }

        if (randomBoolean()) {
            matchQuery.fuzziness(randomFuzziness(fieldName));
        }

        if (randomBoolean()) {
            matchQuery.prefixLength(randomIntBetween(0, 10));
        }

        if (randomBoolean()) {
            matchQuery.minimumShouldMatch(randomMinimumShouldMatch());
        }

        if (randomBoolean()) {
            matchQuery.fuzzyRewrite(getRandomRewriteMethod());
        }

        if (randomBoolean()) {
            matchQuery.fuzzyTranspositions(randomBoolean());
        }

        if (randomBoolean()) {
            matchQuery.lenient(randomBoolean());
        }

        if (randomBoolean()) {
            matchQuery.zeroTermsQuery(randomFrom(MatchQuery.ZeroTermsQuery.values()));
        }

        if (randomBoolean()) {
            matchQuery.cutoffFrequency((float) 10 / randomIntBetween(1, 100));
        }
        return matchQuery;
    }

    @Override
    protected void doAssertLuceneQuery(MatchQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
        assertThat(query, notNullValue());

        if (query instanceof MatchAllDocsQuery) {
            // a match-all query is only produced when all terms were removed and zero_terms_query is ALL
            assertThat(queryBuilder.zeroTermsQuery(), equalTo(ZeroTermsQuery.ALL));
            return;
        }

        switch (queryBuilder.type()) {
            case BOOLEAN:
                assertThat(query, either(instanceOf(BooleanQuery.class)).or(instanceOf(ExtendedCommonTermsQuery.class))
                        .or(instanceOf(TermQuery.class)).or(instanceOf(FuzzyQuery.class)));
                break;
            case PHRASE:
                assertThat(query, either(instanceOf(BooleanQuery.class)).or(instanceOf(PhraseQuery.class))
                        .or(instanceOf(TermQuery.class)).or(instanceOf(FuzzyQuery.class)));
                break;
            case PHRASE_PREFIX:
                assertThat(query, either(instanceOf(BooleanQuery.class)).or(instanceOf(MultiPhrasePrefixQuery.class))
                        .or(instanceOf(TermQuery.class)).or(instanceOf(FuzzyQuery.class)));
                break;
        }

        MappedFieldType fieldType = context.fieldMapper(queryBuilder.fieldName());
        if (query instanceof TermQuery && fieldType != null) {
            String queryValue = queryBuilder.value().toString();
            if (queryBuilder.analyzer() == null || queryBuilder.analyzer().equals("simple")) {
                // both the default analyzer and "simple" lowercase the input terms
                queryValue = queryValue.toLowerCase(Locale.ROOT);
            }
            Query expectedTermQuery = fieldType.termQuery(queryValue, context);
            // the real query will have the boost applied, so set it on our expected query as well
            expectedTermQuery.setBoost(queryBuilder.boost());
            assertEquals(expectedTermQuery, query);
        }

        if (query instanceof BooleanQuery) {
            BooleanQuery bq = (BooleanQuery) query;
            if (queryBuilder.minimumShouldMatch() != null) {
                // calculate the expected minimumShouldMatch value from the number of optional clauses
                int optionalClauses = 0;
                for (BooleanClause c : bq.clauses()) {
                    if (c.getOccur() == BooleanClause.Occur.SHOULD) {
                        optionalClauses++;
                    }
                }
                int msm = Queries.calculateMinShouldMatch(optionalClauses, queryBuilder.minimumShouldMatch());
                assertThat(bq.getMinimumNumberShouldMatch(), equalTo(msm));
            }
            if (queryBuilder.analyzer() == null && queryBuilder.value().toString().length() > 0) {
                // one clause per whitespace-separated term when the default analyzer is used
                assertEquals(queryBuilder.value().toString().split(" ").length, bq.clauses().size());
            }
        }

        if (query instanceof ExtendedCommonTermsQuery) {
            assertNotNull(queryBuilder.cutoffFrequency());
            ExtendedCommonTermsQuery ectq = (ExtendedCommonTermsQuery) query;
            assertEquals(queryBuilder.cutoffFrequency(), ectq.getMaxTermFrequency(), Float.MIN_VALUE);
        }

        if (query instanceof FuzzyQuery) {
            assertNotNull(queryBuilder.fuzziness());
            FuzzyQuery fuzzyQuery = (FuzzyQuery) query;
            // depending on whether an analyzer is set, the term may have been lowercased along the way,
            // so to simplify the test we just compare lowercased terms here
            String originalTermLc = queryBuilder.value().toString().toLowerCase(Locale.ROOT);
            String actualTermLc = fuzzyQuery.getTerm().text().toLowerCase(Locale.ROOT);
            assertThat(actualTermLc, equalTo(originalTermLc));
            assertThat(queryBuilder.prefixLength(), equalTo(fuzzyQuery.getPrefixLength()));
            assertThat(queryBuilder.fuzzyTranspositions(), equalTo(fuzzyQuery.getTranspositions()));
        }
    }

    /** Null and negative arguments must be rejected with {@link IllegalArgumentException}. */
    public void testIllegalValues() {
        try {
            new MatchQueryBuilder(null, "value");
            fail("field name must not be null");
        } catch (IllegalArgumentException ex) {
            // expected
        }

        try {
            new MatchQueryBuilder("fieldName", null);
            fail("value must not be null");
        } catch (IllegalArgumentException ex) {
            // expected
        }

        MatchQueryBuilder matchQuery = new MatchQueryBuilder("fieldName", "text");
        try {
            matchQuery.prefixLength(-1);
            fail("prefix length must not be negative");
        } catch (IllegalArgumentException ex) {
            // expected
        }

        try {
            matchQuery.maxExpansions(-1);
            fail("max expansions must not be negative");
        } catch (IllegalArgumentException ex) {
            // expected
        }

        try {
            matchQuery.operator(null);
            fail("operator must not be null");
        } catch (IllegalArgumentException ex) {
            // expected
        }

        try {
            matchQuery.type(null);
            fail("type must not be null");
        } catch (IllegalArgumentException ex) {
            // expected
        }

        try {
            matchQuery.zeroTermsQuery(null);
            fail("zero terms query must not be null");
        } catch (IllegalArgumentException ex) {
            // expected
        }
    }

    /** An unknown analyzer name must surface as a {@link QueryShardException} at toQuery time. */
    @Test(expected = QueryShardException.class)
    public void testBadAnalyzer() throws IOException {
        MatchQueryBuilder matchQuery = new MatchQueryBuilder("fieldName", "text");
        matchQuery.analyzer("bogusAnalyzer");
        matchQuery.toQuery(createShardContext());
    }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.query;

import org.apache.lucene.search.Query;
import org.junit.Test;

import java.io.IOException;

/**
 * Randomized round-trip and argument-validation tests for {@link MissingQueryBuilder}.
 */
public class MissingQueryBuilderTests extends AbstractQueryTestCase<MissingQueryBuilder> {

    @Override
    protected MissingQueryBuilder doCreateTestQueryBuilder() {
        String fieldName = randomBoolean() ? randomFrom(MAPPED_FIELD_NAMES) : randomAsciiOfLengthBetween(1, 10);
        boolean existence = randomBoolean();
        boolean nullValue = randomBoolean();
        if (existence == false && nullValue == false) {
            // at least one of the two flags must be true, otherwise the builder rejects the combination
            if (randomBoolean()) {
                existence = true;
            } else {
                nullValue = true;
            }
        }
        return new MissingQueryBuilder(fieldName, nullValue, existence);
    }

    @Override
    protected void doAssertLuceneQuery(MissingQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
        // too many mapping-dependent cases to test here; we don't want to end up duplicating the toQuery method
    }

    /** Invalid constructor arguments must be rejected with {@link IllegalArgumentException}. */
    @Test
    public void testIllegalArguments() {
        try {
            if (randomBoolean()) {
                new MissingQueryBuilder("", true, true);
            } else {
                new MissingQueryBuilder(null, true, true);
            }
            fail("field name must not be null or empty");
        } catch (IllegalArgumentException e) {
            // expected
        }

        try {
            new MissingQueryBuilder("fieldname", false, false);
            fail("existence and nullValue cannot both be false");
        } catch (IllegalArgumentException e) {
            // expected
        }

        try {
            // DEFAULT_NULL_VALUE is false, so this is the same invalid combination as above
            new MissingQueryBuilder("fieldname", MissingQueryBuilder.DEFAULT_NULL_VALUE, false);
            fail("existence and nullValue cannot both be false");
        } catch (IllegalArgumentException e) {
            // expected
        }
    }

    /** The static filter factory must also reject the all-false combination, at shard level. */
    @Test(expected = QueryShardException.class)
    public void testBothNullValueAndExistenceFalse() throws IOException {
        QueryShardContext context = createShardContext();
        context.setAllowUnmappedFields(true);
        MissingQueryBuilder.newFilter(context, "field", false, false);
    }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.query;

import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
import org.apache.lucene.index.Fields;
import org.apache.lucene.index.MultiFields;
import org.apache.lucene.index.memory.MemoryIndex;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.termvectors.MultiTermVectorsItemResponse;
import org.elasticsearch.action.termvectors.MultiTermVectorsRequest;
import org.elasticsearch.action.termvectors.MultiTermVectorsResponse;
import org.elasticsearch.action.termvectors.TermVectorsRequest;
import org.elasticsearch.action.termvectors.TermVectorsResponse;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.lucene.search.MoreLikeThisQuery;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.VersionType;
import org.elasticsearch.index.query.MoreLikeThisQueryBuilder.Item;
import org.junit.Before;
import org.junit.Test;

import java.io.IOException;
import java.util.Arrays;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.Map;
import java.util.stream.Stream;

import static org.elasticsearch.index.query.QueryBuilders.moreLikeThisQuery;
import static org.hamcrest.Matchers.*;

/**
 * Randomized round-trip and {@code toQuery} tests for {@link MoreLikeThisQueryBuilder}.
 * Term vector lookups are faked via {@link #executeMultiTermVectors(MultiTermVectorsRequest)}.
 */
public class MoreLikeThisQueryBuilderTests extends AbstractQueryTestCase<MoreLikeThisQueryBuilder> {

    // instance fields (not static): they are (re)assigned from the per-instance @Before
    // method below, so static storage would leak state between test instances
    private String[] randomFields;
    private Item[] randomLikeItems;
    private Item[] randomUnlikeItems;

    @Before
    public void setup() {
        // MLT only supports string fields; unsupported fields are tested below
        randomFields = randomStringFields();
        // we also preset the "like" item requests
        randomLikeItems = new Item[randomIntBetween(1, 3)];
        for (int i = 0; i < randomLikeItems.length; i++) {
            randomLikeItems[i] = generateRandomItem();
        }
        // and for the "unlike" items too
        randomUnlikeItems = new Item[randomIntBetween(1, 3)];
        for (int i = 0; i < randomUnlikeItems.length; i++) {
            randomUnlikeItems[i] = generateRandomItem();
        }
    }

    /** @return a mix of mapped string fields and randomly generated unmapped field names */
    private static String[] randomStringFields() {
        String[] mappedStringFields = new String[]{STRING_FIELD_NAME, STRING_FIELD_NAME_2};
        String[] unmappedStringFields = generateRandomStringArray(2, 5, false, false);
        return Stream.concat(Arrays.stream(mappedStringFields), Arrays.stream(unmappedStringFields)).toArray(String[]::new);
    }

    /** Builds a random {@link Item}, either referencing an indexed doc or carrying an artificial one. */
    private Item generateRandomItem() {
        String index = randomBoolean() ? getIndex().getName() : null;
        String type = getRandomType();  // set to one type to avoid ambiguous types
        // indexed item or artificial document
        Item item;
        if (randomBoolean()) {
            item = new Item(index, type, randomAsciiOfLength(10));
        } else {
            item = new Item(index, type, randomArtificialDoc());
        }
        // if no field is specified MLT uses all mapped fields for this item
        if (randomBoolean()) {
            item.fields(randomFrom(randomFields));
        }
        // per field analyzer
        if (randomBoolean()) {
            item.perFieldAnalyzer(randomPerFieldAnalyzer());
        }
        if (randomBoolean()) {
            item.routing(randomAsciiOfLength(10));
        }
        if (randomBoolean()) {
            item.version(randomInt(5));
        }
        if (randomBoolean()) {
            item.versionType(randomFrom(VersionType.values()));
        }
        return item;
    }

    /** @return a JSON document with a random value for every random field */
    private XContentBuilder randomArtificialDoc() {
        XContentBuilder doc;
        try {
            doc = XContentFactory.jsonBuilder().startObject();
            for (String field : randomFields) {
                doc.field(field, randomAsciiOfLength(10));
            }
            // NOTE(review): the object is never closed with endObject() — confirm downstream
            // parsing tolerates the unterminated document before changing this
        } catch (IOException e) {
            // preserve the cause instead of swallowing it
            throw new ElasticsearchException("Unable to generate random artificial doc!", e);
        }
        return doc;
    }

    /** @return a random analyzer name for every random field */
    private Map<String, String> randomPerFieldAnalyzer() {
        Map<String, String> perFieldAnalyzer = new HashMap<>();
        for (String field : randomFields) {
            perFieldAnalyzer.put(field, randomAnalyzer());
        }
        return perFieldAnalyzer;
    }

    @Override
    protected MoreLikeThisQueryBuilder doCreateTestQueryBuilder() {
        MoreLikeThisQueryBuilder queryBuilder;
        String[] likeTexts = null;
        Item[] likeItems = null;
        // the "like" input is required: either texts or items
        if (randomBoolean()) {
            likeTexts = generateRandomStringArray(5, 5, false, false);
        } else {
            likeItems = randomLikeItems;
        }
        if (randomBoolean()) { // for the default field
            queryBuilder = new MoreLikeThisQueryBuilder(likeTexts, likeItems);
        } else {
            queryBuilder = new MoreLikeThisQueryBuilder(randomFields, likeTexts, likeItems);
        }

        if (randomBoolean()) {
            queryBuilder.unlike(generateRandomStringArray(5, 5, false, false));
        }
        if (randomBoolean()) {
            queryBuilder.unlike(randomUnlikeItems);
        }
        if (randomBoolean()) {
            queryBuilder.maxQueryTerms(randomInt(25));
        }
        if (randomBoolean()) {
            queryBuilder.minTermFreq(randomInt(5));
        }
        if (randomBoolean()) {
            queryBuilder.minDocFreq(randomInt(5));
        }
        if (randomBoolean()) {
            queryBuilder.maxDocFreq(randomInt(100));
        }
        if (randomBoolean()) {
            queryBuilder.minWordLength(randomInt(5));
        }
        if (randomBoolean()) {
            queryBuilder.maxWordLength(randomInt(25));
        }
        if (randomBoolean()) {
            queryBuilder.stopWords(generateRandomStringArray(5, 5, false, false));
        }
        if (randomBoolean()) {
            queryBuilder.analyzer(randomAnalyzer()); // fix the analyzer?
        }
        if (randomBoolean()) {
            queryBuilder.minimumShouldMatch(randomMinimumShouldMatch());
        }
        if (randomBoolean()) {
            queryBuilder.boostTerms(randomFloat() * 10);
        }
        if (randomBoolean()) {
            queryBuilder.include(randomBoolean());
        }
        if (randomBoolean()) {
            queryBuilder.failOnUnsupportedField(randomBoolean());
        }
        return queryBuilder;
    }

    @Override
    protected MultiTermVectorsResponse executeMultiTermVectors(MultiTermVectorsRequest mtvRequest) {
        try {
            MultiTermVectorsItemResponse[] responses = new MultiTermVectorsItemResponse[mtvRequest.size()];
            int i = 0;
            for (TermVectorsRequest request : mtvRequest) {
                TermVectorsResponse response = new TermVectorsResponse(request.index(), request.type(), request.id());
                response.setExists(true);
                Fields generatedFields;
                if (request.doc() != null) {
                    // for artificial docs, the source of the doc is the content of every field
                    generatedFields = generateFields(randomFields, request.doc().toUtf8());
                } else {
                    generatedFields = generateFields(request.selectedFields().toArray(new String[request.selectedFields().size()]), request.id());
                }
                EnumSet<TermVectorsRequest.Flag> flags = EnumSet.of(TermVectorsRequest.Flag.Positions, TermVectorsRequest.Flag.Offsets);
                response.setFields(generatedFields, request.selectedFields(), flags, generatedFields);
                responses[i++] = new MultiTermVectorsItemResponse(response, null);
            }
            return new MultiTermVectorsResponse(responses);
        } catch (IOException ex) {
            throw new ElasticsearchException("boom", ex);
        }
    }

    /**
     * Here we could go overboard and use a pre-generated indexed random document for a given Item,
     * but for now we'd prefer to simply return the id as the content of the document and that for
     * every field.
     */
    private static Fields generateFields(String[] fieldNames, String text) throws IOException {
        MemoryIndex index = new MemoryIndex();
        for (String fieldName : fieldNames) {
            index.addField(fieldName, text, new WhitespaceAnalyzer());
        }
        return MultiFields.getFields(index.createSearcher().getIndexReader());
    }

    @Override
    protected void doAssertLuceneQuery(MoreLikeThisQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
        if (queryBuilder.likeItems() != null && queryBuilder.likeItems().length > 0) {
            assertThat(query, instanceOf(BooleanQuery.class));
        } else {
            // we rely on integration tests for a deeper check here
            assertThat(query, instanceOf(MoreLikeThisQuery.class));
        }
    }

    @Test(expected = IllegalArgumentException.class)
    public void testValidateEmptyFields() {
        new MoreLikeThisQueryBuilder(new String[0], new String[]{"likeText"}, null);
    }

    @Test(expected = IllegalArgumentException.class)
    public void testValidateEmptyLike() {
        String[] likeTexts = randomBoolean() ? null : new String[0];
        Item[] likeItems = randomBoolean() ? null : new Item[0];
        new MoreLikeThisQueryBuilder(likeTexts, likeItems);
    }

    /** Numeric/date fields are unsupported and must fail when failOnUnsupportedField is set. */
    @Test
    public void testUnsupportedFields() throws IOException {
        assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
        String unsupportedField = randomFrom(INT_FIELD_NAME, DOUBLE_FIELD_NAME, DATE_FIELD_NAME);
        MoreLikeThisQueryBuilder queryBuilder = new MoreLikeThisQueryBuilder(new String[]{unsupportedField}, new String[]{"some text"}, null)
                .failOnUnsupportedField(true);
        try {
            queryBuilder.toQuery(createShardContext());
            fail("should have failed with IllegalArgumentException for field: " + unsupportedField);
        } catch (IllegalArgumentException e) {
            assertThat(e.getMessage(), containsString("more_like_this doesn't support binary/numeric fields"));
        }
    }

    /** End-to-end: builder bytes -> parsed query -> MoreLikeThisQuery with the configured settings. */
    @Test
    public void testMoreLikeThisBuilder() throws Exception {
        Query parsedQuery = parseQuery(moreLikeThisQuery(new String[]{"name.first", "name.last"}, new String[]{"something"}, null).minTermFreq(1).maxQueryTerms(12).buildAsBytes()).toQuery(createShardContext());
        assertThat(parsedQuery, instanceOf(MoreLikeThisQuery.class));
        MoreLikeThisQuery mltQuery = (MoreLikeThisQuery) parsedQuery;
        assertThat(mltQuery.getMoreLikeFields()[0], equalTo("name.first"));
        assertThat(mltQuery.getLikeText(), equalTo("something"));
        assertThat(mltQuery.getMinTermFrequency(), equalTo(1));
        assertThat(mltQuery.getMaxQueryTerms(), equalTo(12));
    }

    @Test
    public void testItemSerialization() throws IOException {
        Item expectedItem = generateRandomItem();
        BytesStreamOutput output = new BytesStreamOutput();
        expectedItem.writeTo(output);
        Item newItem = Item.readItemFrom(StreamInput.wrap(output.bytes()));
        assertEquals(expectedItem, newItem);
    }

    @Test
    public void testItemFromXContent() throws IOException {
        Item expectedItem = generateRandomItem();
        String json = expectedItem.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS).string();
        XContentParser parser = XContentFactory.xContent(json).createParser(json);
        Item newItem = Item.parse(parser, ParseFieldMatcher.STRICT, new Item());
        assertEquals(expectedItem, newItem);
    }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.query;

import org.apache.lucene.index.Term;
import org.apache.lucene.queries.ExtendedCommonTermsQuery;
import org.apache.lucene.search.*;
import org.elasticsearch.common.lucene.all.AllTermQuery;
import org.elasticsearch.common.lucene.search.MultiPhrasePrefixQuery;
import org.elasticsearch.index.search.MatchQuery;
import org.junit.Test;

import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import static org.elasticsearch.index.query.QueryBuilders.multiMatchQuery;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertBooleanSubQuery;
import static org.hamcrest.CoreMatchers.*;

/**
 * Randomized round-trip and {@code toQuery} tests for {@link MultiMatchQueryBuilder}.
 */
public class MultiMatchQueryBuilderTests extends AbstractQueryTestCase<MultiMatchQueryBuilder> {

    @Override
    protected MultiMatchQueryBuilder doCreateTestQueryBuilder() {
        String fieldName = randomFrom(STRING_FIELD_NAME, INT_FIELD_NAME, DOUBLE_FIELD_NAME, BOOLEAN_FIELD_NAME, DATE_FIELD_NAME);
        if (fieldName.equals(DATE_FIELD_NAME)) {
            assumeTrue("test with date fields runs only when at least a type is registered", getCurrentTypes().length > 0);
        }
        // creates the query with random value and field name
        Object value;
        if (fieldName.equals(STRING_FIELD_NAME)) {
            value = getRandomQueryText();
        } else {
            value = getRandomValueForFieldName(fieldName);
        }
        MultiMatchQueryBuilder query = new MultiMatchQueryBuilder(value, fieldName);
        // field with random boost
        if (randomBoolean()) {
            query.field(fieldName, randomFloat() * 10);
        }
        // sets other parameters of the multi match query
        if (randomBoolean()) {
            query.type(randomFrom(MultiMatchQueryBuilder.Type.values()));
        }
        if (randomBoolean()) {
            query.operator(randomFrom(Operator.values()));
        }
        if (randomBoolean()) {
            query.analyzer(randomAnalyzer());
        }
        if (randomBoolean()) {
            query.slop(randomIntBetween(0, 5));
        }
        if (randomBoolean()) {
            query.fuzziness(randomFuzziness(fieldName));
        }
        if (randomBoolean()) {
            query.prefixLength(randomIntBetween(0, 5));
        }
        if (randomBoolean()) {
            query.maxExpansions(randomIntBetween(1, 5));
        }
        if (randomBoolean()) {
            query.minimumShouldMatch(randomMinimumShouldMatch());
        }
        if (randomBoolean()) {
            query.fuzzyRewrite(getRandomRewriteMethod());
        }
        if (randomBoolean()) {
            query.useDisMax(randomBoolean());
        }
        if (randomBoolean()) {
            query.tieBreaker(randomFloat());
        }
        if (randomBoolean()) {
            query.lenient(randomBoolean());
        }
        if (randomBoolean()) {
            query.cutoffFrequency((float) 10 / randomIntBetween(1, 100));
        }
        if (randomBoolean()) {
            query.zeroTermsQuery(randomFrom(MatchQuery.ZeroTermsQuery.values()));
        }
        // test with fields with boost and patterns delegated to the tests further below
        return query;
    }

    @Override
    protected Map<String, MultiMatchQueryBuilder> getAlternateVersions() {
        Map<String, MultiMatchQueryBuilder> alternateVersions = new HashMap<>();
        // "fields" may also be a single string instead of an array
        String query = "{\n" +
                "    \"multi_match\": {\n" +
                "        \"query\": \"foo bar\",\n" +
                "        \"fields\": \"myField\"\n" +
                "    }\n" +
                "}";
        alternateVersions.put(query, new MultiMatchQueryBuilder("foo bar", "myField"));
        return alternateVersions;
    }

    @Override
    protected void doAssertLuceneQuery(MultiMatchQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
        // we rely on integration tests for deeper checks here
        assertThat(query, either(instanceOf(TermQuery.class)).or(instanceOf(AllTermQuery.class))
                .or(instanceOf(BooleanQuery.class)).or(instanceOf(DisjunctionMaxQuery.class))
                .or(instanceOf(FuzzyQuery.class)).or(instanceOf(MultiPhrasePrefixQuery.class))
                .or(instanceOf(MatchAllDocsQuery.class)).or(instanceOf(ExtendedCommonTermsQuery.class))
                .or(instanceOf(MatchNoDocsQuery.class)).or(instanceOf(PhraseQuery.class)));
    }

    /** Null values and null/empty field names must be rejected with {@link IllegalArgumentException}. */
    @Test
    public void testIllegalArguments() {
        try {
            new MultiMatchQueryBuilder(null, "field");
            fail("value must not be null");
        } catch (IllegalArgumentException e) {
            // expected
        }

        try {
            new MultiMatchQueryBuilder("value", (String[]) null);
            fail("field names must not be null");
        } catch (IllegalArgumentException e) {
            // expected
        }

        try {
            new MultiMatchQueryBuilder("value", new String[]{""});
            fail("field names cannot be empty");
        } catch (IllegalArgumentException e) {
            // expected
        }

        try {
            new MultiMatchQueryBuilder("value", "field").type(null);
            fail("type must not be null");
        } catch (IllegalArgumentException e) {
            // expected
        }
    }

    @Override
    protected void assertBoost(MultiMatchQueryBuilder queryBuilder, Query query) throws IOException {
        // we delegate boost checks to specific boost tests below
    }

    /** Per-field boost and top-level boost must multiply into the resulting Lucene query. */
    @Test
    public void testToQueryBoost() throws IOException {
        assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
        QueryShardContext shardContext = createShardContext();
        MultiMatchQueryBuilder multiMatchQueryBuilder = new MultiMatchQueryBuilder("test");
        multiMatchQueryBuilder.field(STRING_FIELD_NAME, 5);
        Query query = multiMatchQueryBuilder.toQuery(shardContext);
        assertThat(query, instanceOf(TermQuery.class));
        assertThat(query.getBoost(), equalTo(5f));

        multiMatchQueryBuilder = new MultiMatchQueryBuilder("test");
        multiMatchQueryBuilder.field(STRING_FIELD_NAME, 5);
        multiMatchQueryBuilder.boost(2);
        query = multiMatchQueryBuilder.toQuery(shardContext);
        assertThat(query, instanceOf(TermQuery.class));
        assertThat(query.getBoost(), equalTo(10f));
    }

    @Test
    public void testToQueryMultipleTermsBooleanQuery() throws Exception {
        assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
        Query query = multiMatchQuery("test1 test2").field(STRING_FIELD_NAME).useDisMax(false).toQuery(createShardContext());
        assertThat(query, instanceOf(BooleanQuery.class));
        BooleanQuery bQuery = (BooleanQuery) query;
        assertThat(bQuery.clauses().size(), equalTo(2));
        assertThat(assertBooleanSubQuery(query, TermQuery.class, 0).getTerm(), equalTo(new Term(STRING_FIELD_NAME, "test1")));
        assertThat(assertBooleanSubQuery(query, TermQuery.class, 1).getTerm(), equalTo(new Term(STRING_FIELD_NAME, "test2")));
    }

    @Test
    public void testToQueryMultipleFieldsBooleanQuery() throws Exception {
        assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
        Query query = multiMatchQuery("test").field(STRING_FIELD_NAME).field(STRING_FIELD_NAME_2).useDisMax(false).toQuery(createShardContext());
        assertThat(query, instanceOf(BooleanQuery.class));
        BooleanQuery bQuery = (BooleanQuery) query;
        assertThat(bQuery.clauses().size(), equalTo(2));
        assertThat(assertBooleanSubQuery(query, TermQuery.class, 0).getTerm(), equalTo(new Term(STRING_FIELD_NAME, "test")));
        assertThat(assertBooleanSubQuery(query, TermQuery.class, 1).getTerm(), equalTo(new Term(STRING_FIELD_NAME_2, "test")));
    }

    @Test
    public void testToQueryMultipleFieldsDisMaxQuery() throws Exception {
        assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
        Query query = multiMatchQuery("test").field(STRING_FIELD_NAME).field(STRING_FIELD_NAME_2).useDisMax(true).toQuery(createShardContext());
        assertThat(query, instanceOf(DisjunctionMaxQuery.class));
        DisjunctionMaxQuery disMaxQuery = (DisjunctionMaxQuery) query;
        List<Query> disjuncts = disMaxQuery.getDisjuncts();
        assertThat(((TermQuery) disjuncts.get(0)).getTerm(), equalTo(new Term(STRING_FIELD_NAME, "test")));
        assertThat(((TermQuery) disjuncts.get(1)).getTerm(), equalTo(new Term(STRING_FIELD_NAME_2, "test")));
    }

    /** A field pattern ("mapped_str*") must expand to all matching mapped fields. */
    @Test
    public void testToQueryFieldsWildcard() throws Exception {
        assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
        Query query = multiMatchQuery("test").field("mapped_str*").useDisMax(false).toQuery(createShardContext());
        assertThat(query, instanceOf(BooleanQuery.class));
        BooleanQuery bQuery = (BooleanQuery) query;
        assertThat(bQuery.clauses().size(), equalTo(2));
        assertThat(assertBooleanSubQuery(query, TermQuery.class, 0).getTerm(), equalTo(new Term(STRING_FIELD_NAME, "test")));
        assertThat(assertBooleanSubQuery(query, TermQuery.class, 1).getTerm(), equalTo(new Term(STRING_FIELD_NAME_2, "test")));
    }
}
+ */ + +package org.elasticsearch.index.query; + +import com.carrotsearch.randomizedtesting.generators.RandomPicks; + +import org.apache.lucene.search.Query; +import org.apache.lucene.search.join.ScoreMode; +import org.apache.lucene.search.join.ToParentBlockJoinQuery; +import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; +import org.elasticsearch.common.ParseFieldMatcher; +import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.xcontent.*; +import org.elasticsearch.index.fielddata.IndexFieldDataService; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.query.support.QueryInnerHits; +import org.elasticsearch.search.fetch.innerhits.InnerHitsBuilder; +import org.elasticsearch.search.fetch.innerhits.InnerHitsContext; +import org.elasticsearch.search.internal.SearchContext; +import org.elasticsearch.search.sort.SortOrder; +import org.elasticsearch.test.TestSearchContext; +import org.junit.Test; + +import java.io.IOException; +import java.util.Arrays; + +import static org.hamcrest.CoreMatchers.instanceOf; + +public class NestedQueryBuilderTests extends AbstractQueryTestCase { + + @Override + public void setUp() throws Exception { + super.setUp(); + MapperService mapperService = queryParserService().mapperService; + mapperService.merge("nested_doc", new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef("nested_doc", + STRING_FIELD_NAME, "type=string", + INT_FIELD_NAME, "type=integer", + DOUBLE_FIELD_NAME, "type=double", + BOOLEAN_FIELD_NAME, "type=boolean", + DATE_FIELD_NAME, "type=date", + OBJECT_FIELD_NAME, "type=object", + "nested1", "type=nested" + ).string()), false, false); + } + + @Override + protected void setSearchContext(String[] types) { + final MapperService mapperService = queryParserService().mapperService; + final IndexFieldDataService fieldData = queryParserService().fieldDataService; + TestSearchContext testSearchContext = new TestSearchContext() { 
+ private InnerHitsContext context; + + + @Override + public void innerHits(InnerHitsContext innerHitsContext) { + context = innerHitsContext; + } + + @Override + public InnerHitsContext innerHits() { + return context; + } + + @Override + public MapperService mapperService() { + return mapperService; // need to build / parse inner hits sort fields + } + + @Override + public IndexFieldDataService fieldData() { + return fieldData; // need to build / parse inner hits sort fields + } + }; + testSearchContext.setTypes(types); + SearchContext.setCurrent(testSearchContext); + } + + /** + * @return a {@link HasChildQueryBuilder} with random values all over the place + */ + @Override + protected NestedQueryBuilder doCreateTestQueryBuilder() { + InnerHitsBuilder.InnerHit innerHit = new InnerHitsBuilder.InnerHit().setSize(100).addSort(STRING_FIELD_NAME, SortOrder.ASC); + return new NestedQueryBuilder("nested1", RandomQueryBuilder.createQuery(random()), + RandomPicks.randomFrom(random(), ScoreMode.values()), + SearchContext.current() == null ? null : new QueryInnerHits("inner_hits_name", innerHit)); + } + + @Override + protected void doAssertLuceneQuery(NestedQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException { + QueryBuilder innerQueryBuilder = queryBuilder.query(); + if (innerQueryBuilder instanceof EmptyQueryBuilder) { + assertNull(query); + } else { + assertThat(query, instanceOf(ToParentBlockJoinQuery.class)); + ToParentBlockJoinQuery parentBlockJoinQuery = (ToParentBlockJoinQuery) query; + //TODO how to assert this? 
+ } + if (queryBuilder.innerHit() != null) { + assertNotNull(SearchContext.current()); + if (query != null) { + assertNotNull(SearchContext.current().innerHits()); + assertEquals(1, SearchContext.current().innerHits().getInnerHits().size()); + assertTrue(SearchContext.current().innerHits().getInnerHits().containsKey("inner_hits_name")); + InnerHitsContext.BaseInnerHits innerHits = SearchContext.current().innerHits().getInnerHits().get("inner_hits_name"); + assertEquals(innerHits.size(), 100); + assertEquals(innerHits.sort().getSort().length, 1); + assertEquals(innerHits.sort().getSort()[0].getField(), STRING_FIELD_NAME); + } else { + assertNull(SearchContext.current().innerHits()); + } + } + } + + public void testParseDeprecatedFilter() throws IOException { + XContentBuilder builder = XContentFactory.jsonBuilder().prettyPrint(); + builder.startObject(); + builder.startObject("nested"); + builder.startObject("filter"); + builder.startObject("terms").array(STRING_FIELD_NAME, "a", "b").endObject();// deprecated + builder.endObject(); + builder.field("path", "foo.bar"); + builder.endObject(); + builder.endObject(); + + QueryShardContext shardContext = createShardContext(); + QueryParseContext context = shardContext.parseContext(); + XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(builder.string()); + context.reset(parser); + context.parseFieldMatcher(ParseFieldMatcher.STRICT); + try { + context.parseInnerQueryBuilder(); + fail("filter is deprecated"); + } catch (IllegalArgumentException ex) { + assertEquals("Deprecated field [filter] used, replaced by [query]", ex.getMessage()); + } + + parser = XContentFactory.xContent(XContentType.JSON).createParser(builder.string()); + context.reset(parser); + NestedQueryBuilder queryBuilder = (NestedQueryBuilder) context.parseInnerQueryBuilder(); + QueryBuilder query = queryBuilder.query(); + assertTrue(query instanceof TermsQueryBuilder); + TermsQueryBuilder tqb = (TermsQueryBuilder) query; + 
assertEquals(tqb.values(), Arrays.asList("a", "b")); + } + + @Test + public void testValidate() { + try { + new NestedQueryBuilder(null, EmptyQueryBuilder.PROTOTYPE); + fail("cannot be null"); + } catch (IllegalArgumentException e) { + // expected + } + + try { + new NestedQueryBuilder("path", null); + fail("cannot be null"); + } catch (IllegalArgumentException e) { + // expected + } + + NestedQueryBuilder nestedQueryBuilder = new NestedQueryBuilder("path", EmptyQueryBuilder.PROTOTYPE); + try { + nestedQueryBuilder.scoreMode(null); + fail("cannot be null"); + } catch (IllegalArgumentException e) { + // expected + } + } +} diff --git a/core/src/test/java/org/elasticsearch/index/query/NotQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/NotQueryBuilderTests.java new file mode 100644 index 00000000000..cae4ce5ebce --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/query/NotQueryBuilderTests.java @@ -0,0 +1,110 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.query; + +import org.apache.lucene.search.BooleanClause; +import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.MatchAllDocsQuery; +import org.apache.lucene.search.Query; +import org.elasticsearch.common.ParseFieldMatcher; +import org.elasticsearch.common.ParsingException; +import org.junit.Test; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.CoreMatchers.*; + +public class NotQueryBuilderTests extends AbstractQueryTestCase { + + /** + * @return a NotQueryBuilder with random limit between 0 and 20 + */ + @Override + protected NotQueryBuilder doCreateTestQueryBuilder() { + return new NotQueryBuilder(RandomQueryBuilder.createQuery(random())); + } + + @Override + protected void doAssertLuceneQuery(NotQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException { + Query filter = queryBuilder.innerQuery().toQuery(context); + if (filter == null) { + assertThat(query, nullValue()); + } else { + assertThat(query, instanceOf(BooleanQuery.class)); + BooleanQuery booleanQuery = (BooleanQuery) query; + assertThat(booleanQuery.clauses().size(), equalTo(2)); + assertThat(booleanQuery.clauses().get(0).getOccur(), equalTo(BooleanClause.Occur.MUST)); + assertThat(booleanQuery.clauses().get(0).getQuery(), instanceOf(MatchAllDocsQuery.class)); + assertThat(booleanQuery.clauses().get(1).getOccur(), equalTo(BooleanClause.Occur.MUST_NOT)); + assertThat(booleanQuery.clauses().get(1).getQuery(), equalTo(filter)); + } + } + + @Test(expected=ParsingException.class) + public void testMissingFilterSection() throws IOException { + String queryString = "{ \"not\" : {}"; + parseQuery(queryString); + } + + @Override + protected Map getAlternateVersions() { + Map alternateVersions = new HashMap<>(); + QueryBuilder innerQuery = createTestQueryBuilder().innerQuery(); + //not doesn't support empty query when query/filter element is not specified + 
if (innerQuery != EmptyQueryBuilder.PROTOTYPE) { + NotQueryBuilder testQuery2 = new NotQueryBuilder(innerQuery); + String contentString2 = "{\n" + + " \"not\" : " + testQuery2.innerQuery().toString() + "\n}"; + alternateVersions.put(contentString2, testQuery2); + } + + return alternateVersions; + } + + + public void testDeprecatedXContent() throws IOException { + String deprecatedJson = "{\n" + + " \"not\" : {\n" + + " \"filter\" : " + EmptyQueryBuilder.PROTOTYPE.toString() + "\n" + + " }\n" + + "}"; + try { + parseQuery(deprecatedJson); + fail("filter is deprecated"); + } catch (IllegalArgumentException ex) { + assertEquals("Deprecated field [filter] used, expected [query] instead", ex.getMessage()); + } + + NotQueryBuilder queryBuilder = (NotQueryBuilder) parseQuery(deprecatedJson, ParseFieldMatcher.EMPTY); + assertEquals(EmptyQueryBuilder.PROTOTYPE, queryBuilder.innerQuery()); + } + + @Test + public void testValidate() { + try { + new NotQueryBuilder(null); + fail("cannot be null"); + } catch (IllegalArgumentException e) { + // expected + } + } +} diff --git a/core/src/test/java/org/elasticsearch/index/query/OperatorTests.java b/core/src/test/java/org/elasticsearch/index/query/OperatorTests.java new file mode 100644 index 00000000000..f28688d0c17 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/query/OperatorTests.java @@ -0,0 +1,86 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.query; + +import org.apache.lucene.queryparser.classic.QueryParser; +import org.apache.lucene.search.BooleanClause; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.equalTo; + +public class OperatorTests extends ESTestCase { + + public void testValidOrdinals() { + assertThat(Operator.OR.ordinal(), equalTo(0)); + assertThat(Operator.AND.ordinal(), equalTo(1)); + } + + public void testWriteTo() throws Exception { + try (BytesStreamOutput out = new BytesStreamOutput()) { + Operator.OR.writeTo(out); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + assertThat(in.readVInt(), equalTo(0)); + } + } + + try (BytesStreamOutput out = new BytesStreamOutput()) { + Operator.AND.writeTo(out); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + assertThat(in.readVInt(), equalTo(1)); + } + } + } + + public void testReadFrom() throws Exception { + try (BytesStreamOutput out = new BytesStreamOutput()) { + out.writeVInt(0); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + assertThat(Operator.readOperatorFrom(in), equalTo(Operator.OR)); + } + } + try (BytesStreamOutput out = new BytesStreamOutput()) { + out.writeVInt(1); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + assertThat(Operator.readOperatorFrom(in), equalTo(Operator.AND)); + } + } + } + + public void testToBooleanClauseOccur() { + assertThat(Operator.AND.toBooleanClauseOccur(), 
equalTo(BooleanClause.Occur.MUST)); + assertThat(Operator.OR.toBooleanClauseOccur(), equalTo(BooleanClause.Occur.SHOULD)); + } + + public void testToQueryParserOperator() { + assertThat(Operator.AND.toQueryParserOperator(), equalTo(QueryParser.Operator.AND)); + assertThat(Operator.OR.toQueryParserOperator(), equalTo(QueryParser.Operator.OR)); + } + + public void testFromString() { + assertThat(Operator.fromString("and"), equalTo(Operator.AND)); + assertThat(Operator.fromString("AND"), equalTo(Operator.AND)); + assertThat(Operator.fromString("AnD"), equalTo(Operator.AND)); + assertThat(Operator.fromString("or"), equalTo(Operator.OR)); + assertThat(Operator.fromString("OR"), equalTo(Operator.OR)); + assertThat(Operator.fromString("Or"), equalTo(Operator.OR)); + } +} diff --git a/core/src/test/java/org/elasticsearch/index/query/PrefixQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/PrefixQueryBuilderTests.java new file mode 100644 index 00000000000..7d7a3a4cfeb --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/query/PrefixQueryBuilderTests.java @@ -0,0 +1,88 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.query; + +import org.apache.lucene.index.Term; +import org.apache.lucene.search.MultiTermQuery; +import org.apache.lucene.search.PrefixQuery; +import org.apache.lucene.search.Query; +import org.junit.Test; + +import java.io.IOException; +import java.util.Arrays; + +import static org.elasticsearch.index.query.QueryBuilders.prefixQuery; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; + +public class PrefixQueryBuilderTests extends AbstractQueryTestCase { + + @Override + protected PrefixQueryBuilder doCreateTestQueryBuilder() { + String fieldName = randomBoolean() ? STRING_FIELD_NAME : randomAsciiOfLengthBetween(1, 10); + String value = randomAsciiOfLengthBetween(1, 10); + PrefixQueryBuilder query = new PrefixQueryBuilder(fieldName, value); + + if (randomBoolean()) { + query.rewrite(getRandomRewriteMethod()); + } + return query; + } + + @Override + protected void doAssertLuceneQuery(PrefixQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException { + assertThat(query, instanceOf(PrefixQuery.class)); + PrefixQuery prefixQuery = (PrefixQuery) query; + assertThat(prefixQuery.getPrefix().field(), equalTo(queryBuilder.fieldName())); + assertThat(prefixQuery.getPrefix().text(), equalTo(queryBuilder.value())); + } + + @Test + public void testIllegalArguments() { + try { + if (randomBoolean()) { + new PrefixQueryBuilder(null, "text"); + } else { + new PrefixQueryBuilder("", "text"); + } + fail("cannot be null or empty"); + } catch (IllegalArgumentException e) { + // expected + } + + try { + new PrefixQueryBuilder("field", null); + fail("cannot be null or empty"); + } catch (IllegalArgumentException e) { + // expected + } + } + + @Test + public void testBlendedRewriteMethod() throws IOException { + for (String rewrite : Arrays.asList("top_terms_blended_freqs_10", "topTermsBlendedFreqs10")) { + Query parsedQuery = parseQuery(prefixQuery("field", 
"val").rewrite(rewrite).buildAsBytes()).toQuery(createShardContext()); + assertThat(parsedQuery, instanceOf(PrefixQuery.class)); + PrefixQuery prefixQuery = (PrefixQuery) parsedQuery; + assertThat(prefixQuery.getPrefix(), equalTo(new Term("field", "val"))); + assertThat(prefixQuery.getRewriteMethod(), instanceOf(MultiTermQuery.TopTermsBlendedFreqScoringRewrite.class)); + } + } +} \ No newline at end of file diff --git a/core/src/test/java/org/elasticsearch/index/query/QueryFilterBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/QueryFilterBuilderTests.java new file mode 100644 index 00000000000..15075b30921 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/query/QueryFilterBuilderTests.java @@ -0,0 +1,78 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.query; + +import org.apache.lucene.search.ConstantScoreQuery; +import org.apache.lucene.search.Query; +import org.junit.Test; + +import java.io.IOException; + +import static org.hamcrest.CoreMatchers.*; + +@SuppressWarnings("deprecation") +public class QueryFilterBuilderTests extends AbstractQueryTestCase { + + @Override + protected QueryFilterBuilder doCreateTestQueryBuilder() { + QueryBuilder innerQuery = RandomQueryBuilder.createQuery(random()); + return new QueryFilterBuilder(innerQuery); + } + + @Override + protected void doAssertLuceneQuery(QueryFilterBuilder queryBuilder, Query query, QueryShardContext context) throws IOException { + Query innerQuery = queryBuilder.innerQuery().toQuery(context); + if (innerQuery == null) { + assertThat(query, nullValue()); + } else { + assertThat(query, instanceOf(ConstantScoreQuery.class)); + ConstantScoreQuery constantScoreQuery = (ConstantScoreQuery) query; + assertThat(constantScoreQuery.getQuery(), equalTo(innerQuery)); + } + } + + @Override + protected boolean supportsBoostAndQueryName() { + return false; + } + + /** + * test that wrapping an inner filter that returns null also returns null to pass on upwards + */ + @Test + public void testInnerQueryReturnsNull() throws IOException { + // create inner filter + String queryString = "{ \"constant_score\" : { \"filter\" : {} } }"; + QueryBuilder innerQuery = parseQuery(queryString); + // check that when wrapping this filter, toQuery() returns null + QueryFilterBuilder queryFilterQuery = new QueryFilterBuilder(innerQuery); + assertNull(queryFilterQuery.toQuery(createShardContext())); + } + + @Test + public void testValidate() { + try { + new QueryFilterBuilder(null); + fail("cannot be null"); + } catch (IllegalArgumentException e) { + // expected + } + } +} diff --git a/core/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java 
b/core/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java new file mode 100644 index 00000000000..1b85a26c5ab --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java @@ -0,0 +1,333 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.query; + +import org.apache.lucene.index.Term; +import org.apache.lucene.search.*; +import org.apache.lucene.util.automaton.TooComplexToDeterminizeException; +import org.elasticsearch.common.lucene.all.AllTermQuery; +import org.hamcrest.Matchers; +import org.joda.time.DateTimeZone; +import org.junit.Test; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.index.query.QueryBuilders.queryStringQuery; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertBooleanSubQuery; +import static org.hamcrest.CoreMatchers.either; +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.instanceOf; +import static org.hamcrest.Matchers.*; + +public class QueryStringQueryBuilderTests extends AbstractQueryTestCase { + + @Override + protected QueryStringQueryBuilder doCreateTestQueryBuilder() { + int numTerms = randomIntBetween(0, 5); + String query = ""; + for (int i = 0; i < numTerms; i++) { + //min length 4 makes sure that the text is not an operator (AND/OR) so toQuery won't break + query += (randomBoolean() ? STRING_FIELD_NAME + ":" : "") + randomAsciiOfLengthBetween(4, 10) + " "; + } + QueryStringQueryBuilder queryStringQueryBuilder = new QueryStringQueryBuilder(query); + if (randomBoolean()) { + queryStringQueryBuilder.defaultField(randomBoolean() ? STRING_FIELD_NAME : randomAsciiOfLengthBetween(1, 10)); + } + if (randomBoolean()) { + int numFields = randomIntBetween(1, 5); + for (int i = 0; i < numFields; i++) { + String fieldName = randomBoolean() ? 
STRING_FIELD_NAME : randomAsciiOfLengthBetween(1, 10); + if (randomBoolean()) { + queryStringQueryBuilder.field(fieldName); + } else { + queryStringQueryBuilder.field(fieldName, randomFloat()); + } + } + } + if (randomBoolean()) { + queryStringQueryBuilder.defaultOperator(randomFrom(Operator.values())); + } + if (randomBoolean()) { + //we only use string fields (either mapped or unmapped) + queryStringQueryBuilder.fuzziness(randomFuzziness(STRING_FIELD_NAME)); + } + if (randomBoolean()) { + queryStringQueryBuilder.analyzer(randomAnalyzer()); + } + if (randomBoolean()) { + queryStringQueryBuilder.quoteAnalyzer(randomAnalyzer()); + } + if (randomBoolean()) { + queryStringQueryBuilder.allowLeadingWildcard(randomBoolean()); + } + if (randomBoolean()) { + queryStringQueryBuilder.analyzeWildcard(randomBoolean()); + } + if (randomBoolean()) { + queryStringQueryBuilder.maxDeterminizedStates(randomIntBetween(1, 100)); + } + if (randomBoolean()) { + queryStringQueryBuilder.lowercaseExpandedTerms(randomBoolean()); + } + if (randomBoolean()) { + queryStringQueryBuilder.autoGeneratePhraseQueries(randomBoolean()); + } + if (randomBoolean()) { + queryStringQueryBuilder.enablePositionIncrements(randomBoolean()); + } + if (randomBoolean()) { + queryStringQueryBuilder.lenient(randomBoolean()); + } + if (randomBoolean()) { + queryStringQueryBuilder.escape(randomBoolean()); + } + if (randomBoolean()) { + queryStringQueryBuilder.phraseSlop(randomIntBetween(0, 10)); + } + if (randomBoolean()) { + queryStringQueryBuilder.fuzzyMaxExpansions(randomIntBetween(0, 100)); + } + if (randomBoolean()) { + queryStringQueryBuilder.fuzzyPrefixLength(randomIntBetween(0, 10)); + } + if (randomBoolean()) { + queryStringQueryBuilder.fuzzyRewrite(getRandomRewriteMethod()); + } + if (randomBoolean()) { + queryStringQueryBuilder.rewrite(getRandomRewriteMethod()); + } + if (randomBoolean()) { + queryStringQueryBuilder.quoteFieldSuffix(randomAsciiOfLengthBetween(1, 3)); + } + if (randomBoolean()) { + 
queryStringQueryBuilder.tieBreaker(randomFloat()); + } + if (randomBoolean()) { + queryStringQueryBuilder.minimumShouldMatch(randomMinimumShouldMatch()); + } + if (randomBoolean()) { + queryStringQueryBuilder.useDisMax(randomBoolean()); + } + if (randomBoolean()) { + queryStringQueryBuilder.locale(randomLocale(getRandom())); + } + if (randomBoolean()) { + queryStringQueryBuilder.timeZone(randomTimeZone()); + } + return queryStringQueryBuilder; + } + + @Override + protected void doAssertLuceneQuery(QueryStringQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException { + if ("".equals(queryBuilder.queryString())) { + assertThat(query, instanceOf(MatchNoDocsQuery.class)); + } else { + assertThat(query, either(instanceOf(TermQuery.class)).or(instanceOf(AllTermQuery.class)) + .or(instanceOf(BooleanQuery.class)).or(instanceOf(DisjunctionMaxQuery.class))); + } + } + + @Test + public void testIllegalArguments() { + try { + new QueryStringQueryBuilder(null); + fail("null is not allowed"); + } catch (IllegalArgumentException e) { + // expected + } + } + + @Test + public void testToQueryMatchAllQuery() throws Exception { + Query query = queryStringQuery("*:*").toQuery(createShardContext()); + assertThat(query, instanceOf(MatchAllDocsQuery.class)); + } + + @Test + public void testToQueryTermQuery() throws IOException { + assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); + Query query = queryStringQuery("test").defaultField(STRING_FIELD_NAME).toQuery(createShardContext()); + assertThat(query, instanceOf(TermQuery.class)); + TermQuery termQuery = (TermQuery) query; + assertThat(termQuery.getTerm(), equalTo(new Term(STRING_FIELD_NAME, "test"))); + } + + @Test + public void testToQueryPhraseQuery() throws IOException { + assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); + Query query = queryStringQuery("\"term1 
term2\"").defaultField(STRING_FIELD_NAME).phraseSlop(3).toQuery(createShardContext()); + assertThat(query, instanceOf(DisjunctionMaxQuery.class)); + DisjunctionMaxQuery disjunctionMaxQuery = (DisjunctionMaxQuery) query; + assertThat(disjunctionMaxQuery.getDisjuncts().size(), equalTo(1)); + assertThat(disjunctionMaxQuery.getDisjuncts().get(0), instanceOf(PhraseQuery.class)); + PhraseQuery phraseQuery = (PhraseQuery)disjunctionMaxQuery.getDisjuncts().get(0); + assertThat(phraseQuery.getTerms().length, equalTo(2)); + assertThat(phraseQuery.getTerms()[0], equalTo(new Term(STRING_FIELD_NAME, "term1"))); + assertThat(phraseQuery.getTerms()[1], equalTo(new Term(STRING_FIELD_NAME, "term2"))); + assertThat(phraseQuery.getSlop(), equalTo(3)); + } + + @Test + public void testToQueryBoosts() throws Exception { + assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); + QueryShardContext shardContext = createShardContext(); + QueryStringQueryBuilder queryStringQuery = queryStringQuery(STRING_FIELD_NAME + ":boosted^2"); + Query query = queryStringQuery.toQuery(shardContext); + assertThat(query, instanceOf(BoostQuery.class)); + BoostQuery boostQuery = (BoostQuery) query; + assertThat(boostQuery.getBoost(), Matchers.equalTo(2.0f)); + assertThat(boostQuery.getQuery(), instanceOf(TermQuery.class)); + assertThat(((TermQuery) boostQuery.getQuery()).getTerm(), equalTo(new Term(STRING_FIELD_NAME, "boosted"))); + queryStringQuery.boost(2.0f); + query = queryStringQuery.toQuery(shardContext); + assertThat(query, instanceOf(BoostQuery.class)); + assertThat(((BoostQuery) query).getBoost(), Matchers.equalTo(4.0f)); + + queryStringQuery = queryStringQuery("((" + STRING_FIELD_NAME + ":boosted^2) AND (" + STRING_FIELD_NAME + ":foo^1.5))^3"); + query = queryStringQuery.toQuery(shardContext); + assertThat(query, instanceOf(BoostQuery.class)); + boostQuery = (BoostQuery) query; + assertThat(boostQuery.getBoost(), equalTo(3.0f)); + BoostQuery boostQuery1 = 
assertBooleanSubQuery(boostQuery.getQuery(), BoostQuery.class, 0); + assertThat(boostQuery1.getBoost(), equalTo(2.0f)); + assertThat(boostQuery1.getQuery(), instanceOf(TermQuery.class)); + assertThat(((TermQuery)boostQuery1.getQuery()).getTerm(), equalTo(new Term(STRING_FIELD_NAME, "boosted"))); + BoostQuery boostQuery2 = assertBooleanSubQuery(boostQuery.getQuery(), BoostQuery.class, 1); + assertThat(boostQuery2.getBoost(), equalTo(1.5f)); + assertThat(boostQuery2.getQuery(), instanceOf(TermQuery.class)); + assertThat(((TermQuery)boostQuery2.getQuery()).getTerm(), equalTo(new Term(STRING_FIELD_NAME, "foo"))); + queryStringQuery.boost(2.0f); + query = queryStringQuery.toQuery(shardContext); + assertThat(query, instanceOf(BoostQuery.class)); + boostQuery = (BoostQuery) query; + assertThat(boostQuery.getBoost(), equalTo(6.0f)); + } + + @Test + public void testToQueryMultipleTermsBooleanQuery() throws Exception { + assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); + Query query = queryStringQuery("test1 test2").field(STRING_FIELD_NAME).useDisMax(false).toQuery(createShardContext()); + assertThat(query, instanceOf(BooleanQuery.class)); + BooleanQuery bQuery = (BooleanQuery) query; + assertThat(bQuery.clauses().size(), equalTo(2)); + assertThat(assertBooleanSubQuery(query, TermQuery.class, 0).getTerm(), equalTo(new Term(STRING_FIELD_NAME, "test1"))); + assertThat(assertBooleanSubQuery(query, TermQuery.class, 1).getTerm(), equalTo(new Term(STRING_FIELD_NAME, "test2"))); + } + + @Test + public void testToQueryMultipleFieldsBooleanQuery() throws Exception { + assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); + Query query = queryStringQuery("test").field(STRING_FIELD_NAME).field(STRING_FIELD_NAME_2).useDisMax(false).toQuery(createShardContext()); + assertThat(query, instanceOf(BooleanQuery.class)); + BooleanQuery bQuery = (BooleanQuery) query; + assertThat(bQuery.clauses().size(), 
equalTo(2)); + assertThat(assertBooleanSubQuery(query, TermQuery.class, 0).getTerm(), equalTo(new Term(STRING_FIELD_NAME, "test"))); + assertThat(assertBooleanSubQuery(query, TermQuery.class, 1).getTerm(), equalTo(new Term(STRING_FIELD_NAME_2, "test"))); + } + + @Test + public void testToQueryMultipleFieldsDisMaxQuery() throws Exception { + assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); + Query query = queryStringQuery("test").field(STRING_FIELD_NAME).field(STRING_FIELD_NAME_2).useDisMax(true).toQuery(createShardContext()); + assertThat(query, instanceOf(DisjunctionMaxQuery.class)); + DisjunctionMaxQuery disMaxQuery = (DisjunctionMaxQuery) query; + List disjuncts = disMaxQuery.getDisjuncts(); + assertThat(((TermQuery) disjuncts.get(0)).getTerm(), equalTo(new Term(STRING_FIELD_NAME, "test"))); + assertThat(((TermQuery) disjuncts.get(1)).getTerm(), equalTo(new Term(STRING_FIELD_NAME_2, "test"))); + } + + @Test + public void testToQueryFieldsWildcard() throws Exception { + assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); + Query query = queryStringQuery("test").field("mapped_str*").useDisMax(false).toQuery(createShardContext()); + assertThat(query, instanceOf(BooleanQuery.class)); + BooleanQuery bQuery = (BooleanQuery) query; + assertThat(bQuery.clauses().size(), equalTo(2)); + assertThat(assertBooleanSubQuery(query, TermQuery.class, 0).getTerm(), equalTo(new Term(STRING_FIELD_NAME, "test"))); + assertThat(assertBooleanSubQuery(query, TermQuery.class, 1).getTerm(), equalTo(new Term(STRING_FIELD_NAME_2, "test"))); + } + + @Test + public void testToQueryDisMaxQuery() throws Exception { + assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); + Query query = queryStringQuery("test").field(STRING_FIELD_NAME, 2.2f).field(STRING_FIELD_NAME_2).useDisMax(true).toQuery(createShardContext()); + assertThat(query, 
instanceOf(DisjunctionMaxQuery.class)); + DisjunctionMaxQuery disMaxQuery = (DisjunctionMaxQuery) query; + List disjuncts = disMaxQuery.getDisjuncts(); + assertThat(((TermQuery) disjuncts.get(0)).getTerm(), equalTo(new Term(STRING_FIELD_NAME, "test"))); + assertThat((double) disjuncts.get(0).getBoost(), closeTo(2.2, 0.01)); + assertThat(((TermQuery) disjuncts.get(1)).getTerm(), equalTo(new Term(STRING_FIELD_NAME_2, "test"))); + assertThat((double) disjuncts.get(1).getBoost(), closeTo(1, 0.01)); + } + + @Test + public void testToQueryRegExpQuery() throws Exception { + assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); + Query query = queryStringQuery("/foo*bar/").defaultField(STRING_FIELD_NAME).maxDeterminizedStates(5000).toQuery(createShardContext()); + assertThat(query, instanceOf(RegexpQuery.class)); + RegexpQuery regexpQuery = (RegexpQuery) query; + assertTrue(regexpQuery.toString().contains("/foo*bar/")); + } + + @Test(expected = TooComplexToDeterminizeException.class) + public void testToQueryRegExpQueryTooComplex() throws Exception { + assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); + queryStringQuery("/[ac]*a[ac]{50,200}/").defaultField(STRING_FIELD_NAME).toQuery(createShardContext()); + } + + @Test + public void testToQueryNumericRangeQuery() throws Exception { + assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); + Query query = queryStringQuery("12~0.2").defaultField(INT_FIELD_NAME).toQuery(createShardContext()); + NumericRangeQuery fuzzyQuery = (NumericRangeQuery) query; + assertThat(fuzzyQuery.getMin().longValue(), equalTo(12l)); + assertThat(fuzzyQuery.getMax().longValue(), equalTo(12l)); + + } + + @Test + public void testTimezone() throws Exception { + assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); + String queryAsString = "{\n" + + " \"query_string\":{\n" + + " 
\"time_zone\":\"Europe/Paris\",\n" + + " \"query\":\"" + DATE_FIELD_NAME + ":[2012 TO 2014]\"\n" + + " }\n" + + "}"; + QueryBuilder queryBuilder = parseQuery(queryAsString); + assertThat(queryBuilder, instanceOf(QueryStringQueryBuilder.class)); + QueryStringQueryBuilder queryStringQueryBuilder = (QueryStringQueryBuilder) queryBuilder; + assertThat(queryStringQueryBuilder.timeZone(), equalTo(DateTimeZone.forID("Europe/Paris"))); + + try { + queryAsString = "{\n" + + " \"query_string\":{\n" + + " \"time_zone\":\"This timezone does not exist\",\n" + + " \"query\":\"" + DATE_FIELD_NAME + ":[2012 TO 2014]\"\n" + + " }\n" + + "}"; + parseQuery(queryAsString); + fail("we expect a ParsingException as we are providing an unknown time_zome"); + } catch (IllegalArgumentException e) { + // We expect this one + } + } +} diff --git a/core/src/test/java/org/elasticsearch/index/query/RandomQueryBuilder.java b/core/src/test/java/org/elasticsearch/index/query/RandomQueryBuilder.java new file mode 100644 index 00000000000..2b173bdb1fe --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/query/RandomQueryBuilder.java @@ -0,0 +1,87 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.query; + +import com.carrotsearch.randomizedtesting.generators.RandomInts; +import com.carrotsearch.randomizedtesting.generators.RandomStrings; + +import java.util.Random; + +/** + * Utility class for creating random QueryBuilders. + * So far only leaf queries like {@link MatchAllQueryBuilder}, {@link TermQueryBuilder} or + * {@link IdsQueryBuilder} are returned. + */ +public class RandomQueryBuilder { + + /** + * Create a new query of a random type + * @param r random seed + * @return a random {@link QueryBuilder} + */ + public static QueryBuilder createQuery(Random r) { + switch (RandomInts.randomIntBetween(r, 0, 4)) { + case 0: + return new MatchAllQueryBuilderTests().createTestQueryBuilder(); + case 1: + return new TermQueryBuilderTests().createTestQueryBuilder(); + case 2: + return new IdsQueryBuilderTests().createTestQueryBuilder(); + case 3: + return createMultiTermQuery(r); + case 4: + return EmptyQueryBuilder.PROTOTYPE; + default: + throw new UnsupportedOperationException(); + } + } + + /** + * Create a new multi term query of a random type + * @param r random seed + * @return a random {@link MultiTermQueryBuilder} + */ + public static MultiTermQueryBuilder createMultiTermQuery(Random r) { + // for now, only use String Rangequeries for MultiTerm test, numeric and date makes little sense + // see issue #12123 for discussion + switch(RandomInts.randomIntBetween(r, 0, 5)) { + case 0: + RangeQueryBuilder stringRangeQuery = new RangeQueryBuilder(AbstractQueryTestCase.STRING_FIELD_NAME); + stringRangeQuery.from("a" + RandomStrings.randomAsciiOfLengthBetween(r, 1, 10)); + stringRangeQuery.to("z" + RandomStrings.randomAsciiOfLengthBetween(r, 1, 10)); + return stringRangeQuery; + case 1: + RangeQueryBuilder numericRangeQuery = new RangeQueryBuilder(AbstractQueryTestCase.INT_FIELD_NAME); + numericRangeQuery.from(RandomInts.randomIntBetween(r, 1, 100)); + numericRangeQuery.to(RandomInts.randomIntBetween(r, 101, 200)); + return 
numericRangeQuery; + case 2: + return new FuzzyQueryBuilder(AbstractQueryTestCase.INT_FIELD_NAME, RandomInts.randomInt(r, 1000)); + case 3: + return new FuzzyQueryBuilder(AbstractQueryTestCase.STRING_FIELD_NAME, RandomStrings.randomAsciiOfLengthBetween(r, 1, 10)); + case 4: + return new PrefixQueryBuilderTests().createTestQueryBuilder(); + case 5: + return new WildcardQueryBuilderTests().createTestQueryBuilder(); + default: + throw new UnsupportedOperationException(); + } + } +} diff --git a/core/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java new file mode 100644 index 00000000000..14c1d4f3f49 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java @@ -0,0 +1,329 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.query; + +import org.apache.lucene.search.NumericRangeQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.TermRangeQuery; +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.common.lucene.BytesRefs; +import org.joda.time.DateTime; +import org.joda.time.DateTimeZone; +import org.junit.Test; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +import static org.elasticsearch.index.query.QueryBuilders.rangeQuery; +import static org.hamcrest.Matchers.*; + +public class RangeQueryBuilderTests extends AbstractQueryTestCase { + + @Override + protected RangeQueryBuilder doCreateTestQueryBuilder() { + RangeQueryBuilder query; + // switch between numeric and date ranges + switch (randomIntBetween(0, 2)) { + case 0: + // use mapped integer field for numeric range queries + query = new RangeQueryBuilder(INT_FIELD_NAME); + query.from(randomIntBetween(1, 100)); + query.to(randomIntBetween(101, 200)); + break; + case 1: + // use mapped date field, using date string representation + query = new RangeQueryBuilder(DATE_FIELD_NAME); + query.from(new DateTime(System.currentTimeMillis() - randomIntBetween(0, 1000000), DateTimeZone.UTC).toString()); + query.to(new DateTime(System.currentTimeMillis() + randomIntBetween(0, 1000000), DateTimeZone.UTC).toString()); + // Create timestamp option only then we have a date mapper, + // otherwise we could trigger exception. 
+ if (createShardContext().mapperService().smartNameFieldType(DATE_FIELD_NAME) != null) { + if (randomBoolean()) { + query.timeZone(randomTimeZone()); + } + if (randomBoolean()) { + query.format("yyyy-MM-dd'T'HH:mm:ss.SSSZZ"); + } + } + break; + case 2: + default: + query = new RangeQueryBuilder(STRING_FIELD_NAME); + query.from("a" + randomAsciiOfLengthBetween(1, 10)); + query.to("z" + randomAsciiOfLengthBetween(1, 10)); + break; + } + query.includeLower(randomBoolean()).includeUpper(randomBoolean()); + if (randomBoolean()) { + query.from(null); + } + if (randomBoolean()) { + query.to(null); + } + return query; + } + + @Override + protected Map getAlternateVersions() { + Map alternateVersions = new HashMap<>(); + RangeQueryBuilder rangeQueryBuilder = new RangeQueryBuilder(INT_FIELD_NAME); + rangeQueryBuilder.from(randomIntBetween(1, 100)).to(randomIntBetween(101, 200)); + rangeQueryBuilder.includeLower(randomBoolean()); + rangeQueryBuilder.includeUpper(randomBoolean()); + String query = + "{\n" + + " \"range\":{\n" + + " \"" + INT_FIELD_NAME + "\": {\n" + + " \"" + (rangeQueryBuilder.includeLower() ? "gte" : "gt") + "\": " + rangeQueryBuilder.from() + ",\n" + + " \"" + (rangeQueryBuilder.includeUpper() ? 
"lte" : "lt") + "\": " + rangeQueryBuilder.to() + "\n" + + " }\n" + + " }\n" + + "}"; + alternateVersions.put(query, rangeQueryBuilder); + return alternateVersions; + } + + @Override + protected void doAssertLuceneQuery(RangeQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException { + if (getCurrentTypes().length == 0 || (queryBuilder.fieldName().equals(DATE_FIELD_NAME) == false && queryBuilder.fieldName().equals(INT_FIELD_NAME) == false)) { + assertThat(query, instanceOf(TermRangeQuery.class)); + TermRangeQuery termRangeQuery = (TermRangeQuery) query; + assertThat(termRangeQuery.getField(), equalTo(queryBuilder.fieldName())); + assertThat(termRangeQuery.getLowerTerm(), equalTo(BytesRefs.toBytesRef(queryBuilder.from()))); + assertThat(termRangeQuery.getUpperTerm(), equalTo(BytesRefs.toBytesRef(queryBuilder.to()))); + assertThat(termRangeQuery.includesLower(), equalTo(queryBuilder.includeLower())); + assertThat(termRangeQuery.includesUpper(), equalTo(queryBuilder.includeUpper())); + } else if (queryBuilder.fieldName().equals(DATE_FIELD_NAME)) { + //we can't properly test unmapped dates because LateParsingQuery is package private + } else if (queryBuilder.fieldName().equals(INT_FIELD_NAME)) { + assertThat(query, instanceOf(NumericRangeQuery.class)); + NumericRangeQuery numericRangeQuery = (NumericRangeQuery) query; + assertThat(numericRangeQuery.getField(), equalTo(queryBuilder.fieldName())); + assertThat(numericRangeQuery.getMin(), equalTo(queryBuilder.from())); + assertThat(numericRangeQuery.getMax(), equalTo(queryBuilder.to())); + assertThat(numericRangeQuery.includesMin(), equalTo(queryBuilder.includeLower())); + assertThat(numericRangeQuery.includesMax(), equalTo(queryBuilder.includeUpper())); + } else { + throw new UnsupportedOperationException(); + } + } + + @Test + public void testIllegalArguments() { + try { + if (randomBoolean()) { + new RangeQueryBuilder(null); + } else { + new RangeQueryBuilder(""); + } + fail("cannot be null 
or empty"); + } catch (IllegalArgumentException e) { + // expected + } + + RangeQueryBuilder rangeQueryBuilder = new RangeQueryBuilder("test"); + try { + if (randomBoolean()) { + rangeQueryBuilder.timeZone(null); + } else { + rangeQueryBuilder.timeZone("badID"); + } + fail("cannot be null or unknown id"); + } catch (IllegalArgumentException e) { + // expected + } + + try { + if (randomBoolean()) { + rangeQueryBuilder.format(null); + } else { + rangeQueryBuilder.format("badFormat"); + } + fail("cannot be null or bad format"); + } catch (IllegalArgumentException e) { + // expected + } + } + + /** + * Specifying a timezone together with a numeric range query should throw an exception. + */ + @Test(expected=QueryShardException.class) + public void testToQueryNonDateWithTimezone() throws QueryShardException, IOException { + RangeQueryBuilder query = new RangeQueryBuilder(INT_FIELD_NAME); + query.from(1).to(10).timeZone("UTC"); + query.toQuery(createShardContext()); + } + + /** + * Specifying a timezone together with an unmapped field should throw an exception. 
+ */ + @Test(expected=QueryShardException.class) + public void testToQueryUnmappedWithTimezone() throws QueryShardException, IOException { + RangeQueryBuilder query = new RangeQueryBuilder("bogus_field"); + query.from(1).to(10).timeZone("UTC"); + query.toQuery(createShardContext()); + } + + + @Test + public void testToQueryNumericField() throws IOException { + assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); + Query parsedQuery = rangeQuery(INT_FIELD_NAME).from(23).to(54).includeLower(true).includeUpper(false).toQuery(createShardContext()); + // since age is automatically registered in data, we encode it as numeric + assertThat(parsedQuery, instanceOf(NumericRangeQuery.class)); + NumericRangeQuery rangeQuery = (NumericRangeQuery) parsedQuery; + assertThat(rangeQuery.getField(), equalTo(INT_FIELD_NAME)); + assertThat(rangeQuery.getMin().intValue(), equalTo(23)); + assertThat(rangeQuery.getMax().intValue(), equalTo(54)); + assertThat(rangeQuery.includesMin(), equalTo(true)); + assertThat(rangeQuery.includesMax(), equalTo(false)); + } + + @Test + public void testDateRangeQueryFormat() throws IOException { + assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); + // We test 01/01/2012 from gte and 2030 for lt + String query = "{\n" + + " \"range\" : {\n" + + " \"" + DATE_FIELD_NAME + "\" : {\n" + + " \"gte\": \"01/01/2012\",\n" + + " \"lt\": \"2030\",\n" + + " \"format\": \"dd/MM/yyyy||yyyy\"\n" + + " }\n" + + " }\n" + + "}"; + Query parsedQuery = parseQuery(query).toQuery(createShardContext()).rewrite(null); + assertThat(parsedQuery, instanceOf(NumericRangeQuery.class)); + + // Min value was 01/01/2012 (dd/MM/yyyy) + DateTime min = DateTime.parse("2012-01-01T00:00:00.000+00"); + assertThat(((NumericRangeQuery) parsedQuery).getMin().longValue(), is(min.getMillis())); + + // Max value was 2030 (yyyy) + DateTime max = DateTime.parse("2030-01-01T00:00:00.000+00"); + 
assertThat(((NumericRangeQuery) parsedQuery).getMax().longValue(), is(max.getMillis())); + + // Test Invalid format + query = "{\n" + + " \"range\" : {\n" + + " \"" + DATE_FIELD_NAME + "\" : {\n" + + " \"gte\": \"01/01/2012\",\n" + + " \"lt\": \"2030\",\n" + + " \"format\": \"yyyy\"\n" + + " }\n" + + " }\n" + + "}"; + try { + parseQuery(query).toQuery(createShardContext()).rewrite(null); + fail("A Range Query with a specific format but with an unexpected date should raise a ParsingException"); + } catch (ElasticsearchParseException e) { + // We expect it + } + } + + @Test + public void testDateRangeBoundaries() throws IOException { + assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); + String query = "{\n" + + " \"range\" : {\n" + + " \"" + DATE_FIELD_NAME + "\" : {\n" + + " \"gte\": \"2014-11-05||/M\",\n" + + " \"lte\": \"2014-12-08||/d\"\n" + + " }\n" + + " }\n" + + "}\n"; + Query parsedQuery = parseQuery(query).toQuery(createShardContext()).rewrite(null); + assertThat(parsedQuery, instanceOf(NumericRangeQuery.class)); + NumericRangeQuery rangeQuery = (NumericRangeQuery) parsedQuery; + + DateTime min = DateTime.parse("2014-11-01T00:00:00.000+00"); + assertThat(rangeQuery.getMin().longValue(), is(min.getMillis())); + assertTrue(rangeQuery.includesMin()); + + DateTime max = DateTime.parse("2014-12-08T23:59:59.999+00"); + assertThat(rangeQuery.getMax().longValue(), is(max.getMillis())); + assertTrue(rangeQuery.includesMax()); + + query = "{\n" + + " \"range\" : {\n" + + " \"" + DATE_FIELD_NAME + "\" : {\n" + + " \"gt\": \"2014-11-05||/M\",\n" + + " \"lt\": \"2014-12-08||/d\"\n" + + " }\n" + + " }\n" + + "}"; + parsedQuery = parseQuery(query).toQuery(createShardContext()).rewrite(null); + assertThat(parsedQuery, instanceOf(NumericRangeQuery.class)); + rangeQuery = (NumericRangeQuery) parsedQuery; + + min = DateTime.parse("2014-11-30T23:59:59.999+00"); + assertThat(rangeQuery.getMin().longValue(), is(min.getMillis())); + 
assertFalse(rangeQuery.includesMin()); + + max = DateTime.parse("2014-12-08T00:00:00.000+00"); + assertThat(rangeQuery.getMax().longValue(), is(max.getMillis())); + assertFalse(rangeQuery.includesMax()); + } + + @Test + public void testDateRangeQueryTimezone() throws IOException { + assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); + long startDate = System.currentTimeMillis(); + String query = "{\n" + + " \"range\" : {\n" + + " \"" + DATE_FIELD_NAME + "\" : {\n" + + " \"gte\": \"2012-01-01\",\n" + + " \"lte\": \"now\",\n" + + " \"time_zone\": \"+01:00\"\n" + + " }\n" + + " }\n" + + "}"; + Query parsedQuery = parseQuery(query).toQuery(createShardContext()).rewrite(null); + assertThat(parsedQuery, instanceOf(NumericRangeQuery.class)); + + // Min value was 2012-01-01 (UTC) so we need to remove one hour + DateTime min = DateTime.parse("2012-01-01T00:00:00.000+01:00"); + // Max value is when we started the test. So it should be some ms from now + DateTime max = new DateTime(startDate, DateTimeZone.UTC); + + assertThat(((NumericRangeQuery) parsedQuery).getMin().longValue(), is(min.getMillis())); + + // We should not have a big difference here (should be some ms) + assertThat(((NumericRangeQuery) parsedQuery).getMax().longValue() - max.getMillis(), lessThanOrEqualTo(60000L)); + + query = "{\n" + + " \"range\" : {\n" + + " \"" + INT_FIELD_NAME + "\" : {\n" + + " \"gte\": \"0\",\n" + + " \"lte\": \"100\",\n" + + " \"time_zone\": \"-01:00\"\n" + + " }\n" + + " }\n" + + "}"; + try { + parseQuery(query).toQuery(createShardContext()); + fail("A Range Query on a numeric field with a TimeZone should raise a ParsingException"); + } catch (QueryShardException e) { + // We expect it + } + } +} diff --git a/core/src/test/java/org/elasticsearch/index/query/RegexpQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/RegexpQueryBuilderTests.java new file mode 100644 index 00000000000..4649decc76f --- /dev/null +++ 
b/core/src/test/java/org/elasticsearch/index/query/RegexpQueryBuilderTests.java @@ -0,0 +1,86 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.query; + +import org.apache.lucene.search.Query; +import org.apache.lucene.search.RegexpQuery; +import org.junit.Test; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; + +public class RegexpQueryBuilderTests extends AbstractQueryTestCase { + + @Override + protected RegexpQueryBuilder doCreateTestQueryBuilder() { + // mapped or unmapped fields + String fieldName = randomBoolean() ? 
STRING_FIELD_NAME : randomAsciiOfLengthBetween(1, 10); + String value = randomAsciiOfLengthBetween(1, 10); + RegexpQueryBuilder query = new RegexpQueryBuilder(fieldName, value); + + if (randomBoolean()) { + List flags = new ArrayList<>(); + int iter = randomInt(5); + for (int i = 0; i < iter; i++) { + flags.add(randomFrom(RegexpFlag.values())); + } + query.flags(flags.toArray(new RegexpFlag[flags.size()])); + } + if (randomBoolean()) { + query.maxDeterminizedStates(randomInt(50000)); + } + if (randomBoolean()) { + query.rewrite(randomFrom(getRandomRewriteMethod())); + } + return query; + } + + @Override + protected void doAssertLuceneQuery(RegexpQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException { + assertThat(query, instanceOf(RegexpQuery.class)); + RegexpQuery regexpQuery = (RegexpQuery) query; + assertThat(regexpQuery.getField(), equalTo(queryBuilder.fieldName())); + } + + @Test + public void testIllegalArguments() { + try { + if (randomBoolean()) { + new RegexpQueryBuilder(null, "text"); + } else { + new RegexpQueryBuilder("", "text"); + } + fail("cannot be null or empty"); + } catch (IllegalArgumentException e) { + // expected + } + + try { + new RegexpQueryBuilder("field", null); + fail("cannot be null or empty"); + } catch (IllegalArgumentException e) { + // expected + } + } +} diff --git a/core/src/test/java/org/elasticsearch/index/query/ScoreModeTests.java b/core/src/test/java/org/elasticsearch/index/query/ScoreModeTests.java new file mode 100644 index 00000000000..1b56d347557 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/query/ScoreModeTests.java @@ -0,0 +1,130 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.query; + +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.lucene.search.function.FiltersFunctionScoreQuery; +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.equalTo; + +public class ScoreModeTests extends ESTestCase { + + public void testValidOrdinals() { + assertThat(FiltersFunctionScoreQuery.ScoreMode.FIRST.ordinal(), equalTo(0)); + assertThat(FiltersFunctionScoreQuery.ScoreMode.AVG.ordinal(), equalTo(1)); + assertThat(FiltersFunctionScoreQuery.ScoreMode.MAX.ordinal(), equalTo(2)); + assertThat(FiltersFunctionScoreQuery.ScoreMode.SUM.ordinal(), equalTo(3)); + assertThat(FiltersFunctionScoreQuery.ScoreMode.MIN.ordinal(), equalTo(4)); + assertThat(FiltersFunctionScoreQuery.ScoreMode.MULTIPLY.ordinal(), equalTo(5)); + } + + public void testWriteTo() throws Exception { + try (BytesStreamOutput out = new BytesStreamOutput()) { + FiltersFunctionScoreQuery.ScoreMode.FIRST.writeTo(out); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + assertThat(in.readVInt(), equalTo(0)); + } + } + + try (BytesStreamOutput out = new BytesStreamOutput()) { + FiltersFunctionScoreQuery.ScoreMode.AVG.writeTo(out); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + assertThat(in.readVInt(), equalTo(1)); + } + } + + 
try (BytesStreamOutput out = new BytesStreamOutput()) { + FiltersFunctionScoreQuery.ScoreMode.MAX.writeTo(out); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + assertThat(in.readVInt(), equalTo(2)); + } + } + + try (BytesStreamOutput out = new BytesStreamOutput()) { + FiltersFunctionScoreQuery.ScoreMode.SUM.writeTo(out); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + assertThat(in.readVInt(), equalTo(3)); + } + } + try (BytesStreamOutput out = new BytesStreamOutput()) { + FiltersFunctionScoreQuery.ScoreMode.MIN.writeTo(out); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + assertThat(in.readVInt(), equalTo(4)); + } + } + + try (BytesStreamOutput out = new BytesStreamOutput()) { + FiltersFunctionScoreQuery.ScoreMode.MULTIPLY.writeTo(out); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + assertThat(in.readVInt(), equalTo(5)); + } + } + } + + public void testReadFrom() throws Exception { + try (BytesStreamOutput out = new BytesStreamOutput()) { + out.writeVInt(0); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + assertThat(FiltersFunctionScoreQuery.ScoreMode.readScoreModeFrom(in), equalTo(FiltersFunctionScoreQuery.ScoreMode.FIRST)); + } + } + try (BytesStreamOutput out = new BytesStreamOutput()) { + out.writeVInt(1); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + assertThat(FiltersFunctionScoreQuery.ScoreMode.readScoreModeFrom(in), equalTo(FiltersFunctionScoreQuery.ScoreMode.AVG)); + } + } + try (BytesStreamOutput out = new BytesStreamOutput()) { + out.writeVInt(2); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + assertThat(FiltersFunctionScoreQuery.ScoreMode.readScoreModeFrom(in), equalTo(FiltersFunctionScoreQuery.ScoreMode.MAX)); + } + } + try (BytesStreamOutput out = new BytesStreamOutput()) { + out.writeVInt(3); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + assertThat(FiltersFunctionScoreQuery.ScoreMode.readScoreModeFrom(in), equalTo(FiltersFunctionScoreQuery.ScoreMode.SUM)); 
+ } + } + try (BytesStreamOutput out = new BytesStreamOutput()) { + out.writeVInt(4); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + assertThat(FiltersFunctionScoreQuery.ScoreMode.readScoreModeFrom(in), equalTo(FiltersFunctionScoreQuery.ScoreMode.MIN)); + } + } + try (BytesStreamOutput out = new BytesStreamOutput()) { + out.writeVInt(5); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + assertThat(FiltersFunctionScoreQuery.ScoreMode.readScoreModeFrom(in), equalTo(FiltersFunctionScoreQuery.ScoreMode.MULTIPLY)); + } + } + } + + public void testFromString() { + assertThat(FiltersFunctionScoreQuery.ScoreMode.fromString("first"), equalTo(FiltersFunctionScoreQuery.ScoreMode.FIRST)); + assertThat(FiltersFunctionScoreQuery.ScoreMode.fromString("avg"), equalTo(FiltersFunctionScoreQuery.ScoreMode.AVG)); + assertThat(FiltersFunctionScoreQuery.ScoreMode.fromString("max"), equalTo(FiltersFunctionScoreQuery.ScoreMode.MAX)); + assertThat(FiltersFunctionScoreQuery.ScoreMode.fromString("sum"), equalTo(FiltersFunctionScoreQuery.ScoreMode.SUM)); + assertThat(FiltersFunctionScoreQuery.ScoreMode.fromString("min"), equalTo(FiltersFunctionScoreQuery.ScoreMode.MIN)); + assertThat(FiltersFunctionScoreQuery.ScoreMode.fromString("multiply"), equalTo(FiltersFunctionScoreQuery.ScoreMode.MULTIPLY)); + } +} diff --git a/core/src/test/java/org/elasticsearch/index/query/ScriptQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/ScriptQueryBuilderTests.java new file mode 100644 index 00000000000..87384f78d8f --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/query/ScriptQueryBuilderTests.java @@ -0,0 +1,57 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.query; + +import org.apache.lucene.search.Query; +import org.elasticsearch.script.MockScriptEngine; +import org.elasticsearch.script.Script; +import org.elasticsearch.script.ScriptService.ScriptType; +import org.junit.Test; + +import java.io.IOException; +import java.util.Collections; +import java.util.Map; + +import static org.hamcrest.Matchers.instanceOf; + +public class ScriptQueryBuilderTests extends AbstractQueryTestCase { + + @Override + protected ScriptQueryBuilder doCreateTestQueryBuilder() { + String script = "5"; + Map params = Collections.emptyMap(); + return new ScriptQueryBuilder(new Script(script, ScriptType.INLINE, MockScriptEngine.NAME, params)); + } + + @Override + protected void doAssertLuceneQuery(ScriptQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException { + assertThat(query, instanceOf(ScriptQueryBuilder.ScriptQuery.class)); + } + + @Test + public void testIllegalConstructorArg() { + try { + new ScriptQueryBuilder(null); + fail("cannot be null"); + } catch (IllegalArgumentException e) { + // expected + } + } +} diff --git a/core/src/test/java/org/elasticsearch/index/query/SimpleIndexQueryParserTests.java b/core/src/test/java/org/elasticsearch/index/query/SimpleIndexQueryParserTests.java deleted file mode 100644 index 478f2c271c1..00000000000 --- 
a/core/src/test/java/org/elasticsearch/index/query/SimpleIndexQueryParserTests.java +++ /dev/null @@ -1,2151 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.index.query; - -import org.apache.lucene.analysis.core.WhitespaceAnalyzer; -import org.apache.lucene.index.Fields; -import org.apache.lucene.index.MultiFields; -import org.apache.lucene.index.Term; -import org.apache.lucene.index.Terms; -import org.apache.lucene.index.TermsEnum; -import org.apache.lucene.index.memory.MemoryIndex; -import org.apache.lucene.queries.BoostingQuery; -import org.apache.lucene.queries.ExtendedCommonTermsQuery; -import org.apache.lucene.queries.TermsQuery; -import org.apache.lucene.search.BooleanClause; -import org.apache.lucene.search.BooleanClause.Occur; -import org.apache.lucene.search.BooleanQuery; -import org.apache.lucene.search.BoostQuery; -import org.apache.lucene.search.ConstantScoreQuery; -import org.apache.lucene.search.DisjunctionMaxQuery; -import org.apache.lucene.search.FuzzyQuery; -import org.apache.lucene.search.MatchAllDocsQuery; -import org.apache.lucene.search.MultiTermQuery; -import org.apache.lucene.search.NumericRangeQuery; -import org.apache.lucene.search.PrefixQuery; -import 
org.apache.lucene.search.Query; -import org.apache.lucene.search.QueryWrapperFilter; -import org.apache.lucene.search.RegexpQuery; -import org.apache.lucene.search.TermQuery; -import org.apache.lucene.search.TermRangeQuery; -import org.apache.lucene.search.WildcardQuery; -import org.apache.lucene.search.spans.FieldMaskingSpanQuery; -import org.apache.lucene.search.spans.SpanContainingQuery; -import org.apache.lucene.search.spans.SpanFirstQuery; -import org.apache.lucene.search.spans.SpanMultiTermQueryWrapper; -import org.apache.lucene.search.spans.SpanNearQuery; -import org.apache.lucene.search.spans.SpanNotQuery; -import org.apache.lucene.search.spans.SpanOrQuery; -import org.apache.lucene.search.spans.SpanTermQuery; -import org.apache.lucene.search.spans.SpanWithinQuery; -import org.apache.lucene.spatial.prefix.IntersectsPrefixTreeFilter; -import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.BytesRefBuilder; -import org.apache.lucene.util.CharsRefBuilder; -import org.apache.lucene.util.NumericUtils; -import org.apache.lucene.util.automaton.TooComplexToDeterminizeException; -import org.elasticsearch.action.termvectors.MultiTermVectorsItemResponse; -import org.elasticsearch.action.termvectors.MultiTermVectorsResponse; -import org.elasticsearch.action.termvectors.TermVectorsRequest; -import org.elasticsearch.action.termvectors.TermVectorsResponse; -import org.elasticsearch.cluster.metadata.MetaData; -import org.elasticsearch.common.ParsingException; -import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.lucene.search.MoreLikeThisQuery; -import org.elasticsearch.common.lucene.search.Queries; -import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery; -import org.elasticsearch.common.lucene.search.function.WeightFactorFunction; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.unit.DistanceUnit; -import 
org.elasticsearch.common.unit.Fuzziness; -import org.elasticsearch.common.util.set.Sets; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.engine.Engine; -import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.mapper.ParsedDocument; -import org.elasticsearch.index.mapper.core.NumberFieldMapper; -import org.elasticsearch.index.query.MoreLikeThisQueryBuilder.Item; -import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders; -import org.elasticsearch.index.search.geo.GeoDistanceRangeQuery; -import org.elasticsearch.index.search.geo.GeoPolygonQuery; -import org.elasticsearch.index.search.geo.InMemoryGeoBoundingBoxQuery; -import org.elasticsearch.index.search.morelikethis.MoreLikeThisFetchService; -import org.elasticsearch.search.internal.SearchContext; -import org.elasticsearch.test.ESSingleNodeTestCase; -import org.hamcrest.Matchers; -import org.junit.Before; -import org.junit.Test; - -import java.io.IOException; -import java.util.Arrays; -import java.util.EnumSet; -import java.util.HashSet; -import java.util.List; -import java.util.Locale; - -import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.index.query.QueryBuilders.boolQuery; -import static org.elasticsearch.index.query.QueryBuilders.boostingQuery; -import static org.elasticsearch.index.query.QueryBuilders.commonTermsQuery; -import static org.elasticsearch.index.query.QueryBuilders.constantScoreQuery; -import static org.elasticsearch.index.query.QueryBuilders.disMaxQuery; -import static org.elasticsearch.index.query.QueryBuilders.functionScoreQuery; -import static org.elasticsearch.index.query.QueryBuilders.fuzzyQuery; -import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; -import static org.elasticsearch.index.query.QueryBuilders.moreLikeThisQuery; -import static 
org.elasticsearch.index.query.QueryBuilders.multiMatchQuery; -import static org.elasticsearch.index.query.QueryBuilders.notQuery; -import static org.elasticsearch.index.query.QueryBuilders.prefixQuery; -import static org.elasticsearch.index.query.QueryBuilders.queryStringQuery; -import static org.elasticsearch.index.query.QueryBuilders.rangeQuery; -import static org.elasticsearch.index.query.QueryBuilders.regexpQuery; -import static org.elasticsearch.index.query.QueryBuilders.spanContainingQuery; -import static org.elasticsearch.index.query.QueryBuilders.spanFirstQuery; -import static org.elasticsearch.index.query.QueryBuilders.spanNearQuery; -import static org.elasticsearch.index.query.QueryBuilders.spanNotQuery; -import static org.elasticsearch.index.query.QueryBuilders.spanOrQuery; -import static org.elasticsearch.index.query.QueryBuilders.spanTermQuery; -import static org.elasticsearch.index.query.QueryBuilders.spanWithinQuery; -import static org.elasticsearch.index.query.QueryBuilders.termQuery; -import static org.elasticsearch.index.query.QueryBuilders.termsQuery; -import static org.elasticsearch.index.query.QueryBuilders.wildcardQuery; -import static org.elasticsearch.test.StreamsUtils.copyToBytesFromClasspath; -import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertBooleanSubQuery; -import static org.hamcrest.Matchers.closeTo; -import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.instanceOf; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.not; -import static org.hamcrest.Matchers.notNullValue; -import static org.hamcrest.Matchers.nullValue; -import static org.hamcrest.Matchers.sameInstance; - -public class SimpleIndexQueryParserTests extends ESSingleNodeTestCase { - - private IndexQueryParserService queryParser; - private IndexService indexService; - 
- @Before - public void setup() throws IOException { - Settings settings = Settings.settingsBuilder() - .put("index.cache.filter.type", "none") - .put("name", "SimpleIndexQueryParserTests") - .build(); - IndexService indexService = createIndex("test", settings); - MapperService mapperService = indexService.mapperService(); - - String mapping = copyToStringFromClasspath("/org/elasticsearch/index/query/mapping.json"); - mapperService.merge("person", new CompressedXContent(mapping), true, false); - ParsedDocument doc = mapperService.documentMapper("person").parse("test", "person", "1", new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/query/data.json"))); - assertNotNull(doc.dynamicMappingsUpdate()); - client().admin().indices().preparePutMapping("test").setType("person").setSource(doc.dynamicMappingsUpdate().toString()).get(); - - this.indexService = indexService; - queryParser = indexService.queryParserService(); - } - - private IndexQueryParserService queryParser() throws IOException { - return this.queryParser; - } - - private BytesRef longToPrefixCoded(long val, int shift) { - BytesRefBuilder bytesRef = new BytesRefBuilder(); - NumericUtils.longToPrefixCoded(val, shift, bytesRef); - return bytesRef.get(); - } - - @Test - public void testQueryStringBuilder() throws Exception { - IndexQueryParserService queryParser = queryParser(); - Query parsedQuery = queryParser.parse(queryStringQuery("test").defaultField("content").phraseSlop(1)).query(); - - assertThat(parsedQuery, instanceOf(TermQuery.class)); - TermQuery termQuery = (TermQuery) parsedQuery; - assertThat(termQuery.getTerm(), equalTo(new Term("content", "test"))); - } - - @Test - public void testQueryString() throws Exception { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/query.json"); - Query parsedQuery = queryParser.parse(query).query(); - assertThat(parsedQuery, instanceOf(TermQuery.class)); - TermQuery 
termQuery = (TermQuery) parsedQuery; - assertThat(termQuery.getTerm(), equalTo(new Term("content", "test"))); - } - - @Test - public void testQueryStringBoostsBuilder() throws Exception { - IndexQueryParserService queryParser = queryParser(); - QueryStringQueryBuilder builder = queryStringQuery("field:boosted^2"); - Query expected = new BoostQuery(new TermQuery(new Term("field", "boosted")), 2); - Query parsedQuery = queryParser.parse(builder).query(); - assertEquals(expected, parsedQuery); - - builder.boost(2.0f); - expected = new BoostQuery(new TermQuery(new Term("field", "boosted")), 4); - parsedQuery = queryParser.parse(builder).query(); - assertEquals(expected, parsedQuery); - - builder = queryStringQuery("((field:boosted^2) AND (field:foo^1.5))^3"); - expected = new BoostQuery(new BooleanQuery.Builder() - .add(new BoostQuery(new TermQuery(new Term("field", "boosted")), 2), Occur.MUST) - .add(new BoostQuery(new TermQuery(new Term("field", "foo")), 1.5f), Occur.MUST) - .build(), 3); - parsedQuery = queryParser.parse(builder).query(); - assertEquals(expected, parsedQuery); - - builder.boost(2.0f); - expected = new BoostQuery(new BooleanQuery.Builder() - .add(new BoostQuery(new TermQuery(new Term("field", "boosted")), 2), Occur.MUST) - .add(new BoostQuery(new TermQuery(new Term("field", "foo")), 1.5f), Occur.MUST) - .build(), 6); - parsedQuery = queryParser.parse(builder).query(); - assertEquals(expected, parsedQuery); - } - - @Test - public void testQueryStringFields1Builder() throws Exception { - IndexQueryParserService queryParser = queryParser(); - Query parsedQuery = queryParser.parse(queryStringQuery("test").field("content").field("name").useDisMax(false)).query(); - assertThat(parsedQuery, instanceOf(BooleanQuery.class)); - BooleanQuery bQuery = (BooleanQuery) parsedQuery; - assertThat(bQuery.clauses().size(), equalTo(2)); - assertThat(assertBooleanSubQuery(parsedQuery, TermQuery.class, 0).getTerm(), equalTo(new Term("content", "test"))); - 
assertThat(assertBooleanSubQuery(parsedQuery, TermQuery.class, 1).getTerm(), equalTo(new Term("name", "test"))); - } - - @Test - public void testQueryStringFields1() throws Exception { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/query-fields1.json"); - Query parsedQuery = queryParser.parse(query).query(); - assertThat(parsedQuery, instanceOf(BooleanQuery.class)); - BooleanQuery bQuery = (BooleanQuery) parsedQuery; - assertThat(bQuery.clauses().size(), equalTo(2)); - assertThat(assertBooleanSubQuery(parsedQuery, TermQuery.class, 0).getTerm(), equalTo(new Term("content", "test"))); - assertThat(assertBooleanSubQuery(parsedQuery, TermQuery.class, 1).getTerm(), equalTo(new Term("name", "test"))); - } - - @Test - public void testQueryStringFieldsMatch() throws Exception { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/query-fields-match.json"); - Query parsedQuery = queryParser.parse(query).query(); - assertThat(parsedQuery, instanceOf(BooleanQuery.class)); - BooleanQuery bQuery = (BooleanQuery) parsedQuery; - assertThat(bQuery.clauses().size(), equalTo(2)); - assertEquals(Sets.newHashSet(new Term("name.first", "test"), new Term("name.last", "test")), - Sets.newHashSet(assertBooleanSubQuery(parsedQuery, TermQuery.class, 0).getTerm(), - assertBooleanSubQuery(parsedQuery, TermQuery.class, 1).getTerm())); - } - - @Test - public void testQueryStringFields2Builder() throws Exception { - IndexQueryParserService queryParser = queryParser(); - Query parsedQuery = queryParser.parse(queryStringQuery("test").field("content").field("name").useDisMax(true)).query(); - assertThat(parsedQuery, instanceOf(DisjunctionMaxQuery.class)); - DisjunctionMaxQuery disMaxQuery = (DisjunctionMaxQuery) parsedQuery; - List disjuncts = disMaxQuery.getDisjuncts(); - assertThat(((TermQuery) disjuncts.get(0)).getTerm(), equalTo(new 
Term("content", "test"))); - assertThat(((TermQuery) disjuncts.get(1)).getTerm(), equalTo(new Term("name", "test"))); - } - - @Test - public void testQueryStringFields2() throws Exception { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/query-fields2.json"); - Query parsedQuery = queryParser.parse(query).query(); - assertThat(parsedQuery, instanceOf(DisjunctionMaxQuery.class)); - DisjunctionMaxQuery disMaxQuery = (DisjunctionMaxQuery) parsedQuery; - List disjuncts = disMaxQuery.getDisjuncts(); - assertThat(((TermQuery) disjuncts.get(0)).getTerm(), equalTo(new Term("content", "test"))); - assertThat(((TermQuery) disjuncts.get(1)).getTerm(), equalTo(new Term("name", "test"))); - } - - @Test - public void testQueryStringFields3Builder() throws Exception { - IndexQueryParserService queryParser = queryParser(); - Query parsedQuery = queryParser.parse(queryStringQuery("test").field("content", 2.2f).field("name").useDisMax(true)).query(); - assertThat(parsedQuery, instanceOf(DisjunctionMaxQuery.class)); - DisjunctionMaxQuery disMaxQuery = (DisjunctionMaxQuery) parsedQuery; - List disjuncts = disMaxQuery.getDisjuncts(); - assertThat(((TermQuery) disjuncts.get(0)).getTerm(), equalTo(new Term("content", "test"))); - assertThat((double) disjuncts.get(0).getBoost(), closeTo(2.2, 0.01)); - assertThat(((TermQuery) disjuncts.get(1)).getTerm(), equalTo(new Term("name", "test"))); - assertThat((double) disjuncts.get(1).getBoost(), closeTo(1, 0.01)); - } - - @Test - public void testQueryStringFields3() throws Exception { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/query-fields3.json"); - Query parsedQuery = queryParser.parse(query).query(); - assertThat(parsedQuery, instanceOf(DisjunctionMaxQuery.class)); - DisjunctionMaxQuery disMaxQuery = (DisjunctionMaxQuery) parsedQuery; - List disjuncts = 
disMaxQuery.getDisjuncts(); - assertThat(((TermQuery) disjuncts.get(0)).getTerm(), equalTo(new Term("content", "test"))); - assertThat((double) disjuncts.get(0).getBoost(), closeTo(2.2, 0.01)); - assertThat(((TermQuery) disjuncts.get(1)).getTerm(), equalTo(new Term("name", "test"))); - assertThat((double) disjuncts.get(1).getBoost(), closeTo(1, 0.01)); - } - - @Test - public void testQueryStringTimezone() throws Exception { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/query-timezone.json"); - Query parsedQuery = queryParser.parse(query).query(); - assertThat(parsedQuery, instanceOf(TermRangeQuery.class)); - - try { - queryParser.parse(copyToStringFromClasspath("/org/elasticsearch/index/query/query-timezone-incorrect.json")); - fail("we expect a QueryParsingException as we are providing an unknown time_zome"); - } catch (ParsingException e) { - // We expect this one - } - } - - @Test - public void testQueryStringRegexp() throws Exception { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/query-regexp-max-determinized-states.json"); - Query parsedQuery = queryParser.parse(query).query(); - assertThat(parsedQuery, instanceOf(RegexpQuery.class)); - RegexpQuery regexpQuery = (RegexpQuery) parsedQuery; - assertTrue(regexpQuery.toString().contains("/foo*bar/")); - } - - @Test - public void testQueryStringRegexpTooManyDeterminizedStates() throws Exception { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/query-regexp-too-many-determinized-states.json"); - try { - queryParser.parse(query).query(); - fail("did not hit exception"); - } catch (ParsingException qpe) { - // expected - assertTrue(qpe.getCause() instanceof TooComplexToDeterminizeException); - } - } - - @Test - public void testMatchAllBuilder() throws Exception { - 
IndexQueryParserService queryParser = queryParser(); - Query parsedQuery = queryParser.parse(matchAllQuery().boost(1.2f)).query(); - assertThat(parsedQuery, instanceOf(MatchAllDocsQuery.class)); - MatchAllDocsQuery matchAllDocsQuery = (MatchAllDocsQuery) parsedQuery; - assertThat((double) matchAllDocsQuery.getBoost(), closeTo(1.2, 0.01)); - } - - @Test - public void testMatchAll() throws Exception { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/matchAll.json"); - Query parsedQuery = queryParser.parse(query).query(); - assertThat(parsedQuery, instanceOf(MatchAllDocsQuery.class)); - MatchAllDocsQuery matchAllDocsQuery = (MatchAllDocsQuery) parsedQuery; - assertThat((double) matchAllDocsQuery.getBoost(), closeTo(1.2, 0.01)); - } - - @Test - public void testMatchAllEmpty1() throws Exception { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/match_all_empty1.json"); - Query parsedQuery = queryParser.parse(query).query(); - assertThat(parsedQuery, equalTo(Queries.newMatchAllQuery())); - assertThat(parsedQuery, not(sameInstance(Queries.newMatchAllQuery()))); - } - - @Test - public void testMatchAllEmpty2() throws Exception { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/match_all_empty2.json"); - Query parsedQuery = queryParser.parse(query).query(); - assertThat(parsedQuery, equalTo(Queries.newMatchAllQuery())); - assertThat(parsedQuery, not(sameInstance(Queries.newMatchAllQuery()))); - - } - - @Test - public void testStarColonStar() throws Exception { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/starColonStar.json"); - Query parsedQuery = queryParser.parse(query).query(); - assertThat(parsedQuery, instanceOf(MatchAllDocsQuery.class)); - } - - 
@Test - public void testDisMaxBuilder() throws Exception { - IndexQueryParserService queryParser = queryParser(); - Query parsedQuery = queryParser.parse(disMaxQuery().boost(1.2f).tieBreaker(0.7f).add(termQuery("name.first", "first")).add(termQuery("name.last", "last"))).query(); - assertThat(parsedQuery, instanceOf(DisjunctionMaxQuery.class)); - DisjunctionMaxQuery disjunctionMaxQuery = (DisjunctionMaxQuery) parsedQuery; - assertThat((double) disjunctionMaxQuery.getBoost(), closeTo(1.2, 0.01)); - - List disjuncts = disjunctionMaxQuery.getDisjuncts(); - assertThat(disjuncts.size(), equalTo(2)); - - Query firstQ = disjuncts.get(0); - assertThat(firstQ, instanceOf(TermQuery.class)); - assertThat(((TermQuery) firstQ).getTerm(), equalTo(new Term("name.first", "first"))); - - Query secondsQ = disjuncts.get(1); - assertThat(secondsQ, instanceOf(TermQuery.class)); - assertThat(((TermQuery) secondsQ).getTerm(), equalTo(new Term("name.last", "last"))); - } - - @Test - public void testDisMax() throws Exception { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/disMax.json"); - Query parsedQuery = queryParser.parse(query).query(); - assertThat(parsedQuery, instanceOf(DisjunctionMaxQuery.class)); - DisjunctionMaxQuery disjunctionMaxQuery = (DisjunctionMaxQuery) parsedQuery; - assertThat((double) disjunctionMaxQuery.getBoost(), closeTo(1.2, 0.01)); - - List disjuncts = disjunctionMaxQuery.getDisjuncts(); - assertThat(disjuncts.size(), equalTo(2)); - - Query firstQ = disjuncts.get(0); - assertThat(firstQ, instanceOf(TermQuery.class)); - assertThat(((TermQuery) firstQ).getTerm(), equalTo(new Term("name.first", "first"))); - - Query secondsQ = disjuncts.get(1); - assertThat(secondsQ, instanceOf(TermQuery.class)); - assertThat(((TermQuery) secondsQ).getTerm(), equalTo(new Term("name.last", "last"))); - } - - @Test - public void testDisMax2() throws Exception { - IndexQueryParserService queryParser = 
queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/disMax2.json"); - Query parsedQuery = queryParser.parse(query).query(); - assertThat(parsedQuery, instanceOf(DisjunctionMaxQuery.class)); - DisjunctionMaxQuery disjunctionMaxQuery = (DisjunctionMaxQuery) parsedQuery; - - List disjuncts = disjunctionMaxQuery.getDisjuncts(); - assertThat(disjuncts.size(), equalTo(1)); - - PrefixQuery firstQ = (PrefixQuery) disjuncts.get(0); - // since age is automatically registered in data, we encode it as numeric - assertThat(firstQ.getPrefix(), equalTo(new Term("name.first", "sh"))); - assertThat((double) firstQ.getBoost(), closeTo(1.2, 0.00001)); - } - - @Test - public void testTermQueryBuilder() throws IOException { - IndexQueryParserService queryParser = queryParser(); - Query parsedQuery = queryParser.parse(termQuery("age", 34).buildAsBytes()).query(); - TermQuery fieldQuery = unwrapTermQuery(parsedQuery); - assertThat(fieldQuery.getTerm().bytes(), equalTo(indexedValueForSearch(34l))); - } - - @Test - public void testTermQuery() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/term.json"); - TermQuery fieldQuery = unwrapTermQuery(queryParser.parse(query).query()); - assertThat(fieldQuery.getTerm().bytes(), equalTo(indexedValueForSearch(34l))); - } - - @Test(expected = ParsingException.class) - public void testTermQueryArrayInvalid() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/term-array-invalid.json"); - unwrapTermQuery(queryParser.parse(query).query()); - } - - private static TermQuery unwrapTermQuery(Query q) { - assertThat(q, instanceOf(TermQuery.class)); - return (TermQuery) q; - } - - @Test - public void testFuzzyQueryBuilder() throws IOException { - IndexQueryParserService queryParser = queryParser(); - Query parsedQuery = 
queryParser.parse(fuzzyQuery("name.first", "sh").buildAsBytes()).query(); - assertThat(parsedQuery, instanceOf(FuzzyQuery.class)); - FuzzyQuery fuzzyQuery = (FuzzyQuery) parsedQuery; - assertThat(fuzzyQuery.getTerm(), equalTo(new Term("name.first", "sh"))); - } - - @Test - public void testFuzzyQuery() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/fuzzy.json"); - Query parsedQuery = queryParser.parse(query).query(); - assertThat(parsedQuery, instanceOf(FuzzyQuery.class)); - FuzzyQuery fuzzyQuery = (FuzzyQuery) parsedQuery; - assertThat(fuzzyQuery.getTerm(), equalTo(new Term("name.first", "sh"))); - assertThat(fuzzyQuery.getRewriteMethod(), instanceOf(MultiTermQuery.TopTermsBlendedFreqScoringRewrite.class)); - } - - @Test - public void testFuzzyQueryWithFieldsBuilder() throws IOException { - IndexQueryParserService queryParser = queryParser(); - Query parsedQuery = queryParser.parse(fuzzyQuery("name.first", "sh").fuzziness(Fuzziness.ONE).prefixLength(1).boost(2.0f).buildAsBytes()).query(); - assertThat(parsedQuery, instanceOf(FuzzyQuery.class)); - FuzzyQuery fuzzyQuery = (FuzzyQuery) parsedQuery; - assertThat(fuzzyQuery.getTerm(), equalTo(new Term("name.first", "sh"))); - assertThat(fuzzyQuery.getMaxEdits(), equalTo(FuzzyQuery.floatToEdits(0.1f, "sh".length()))); - assertThat(fuzzyQuery.getPrefixLength(), equalTo(1)); - assertThat(fuzzyQuery.getBoost(), equalTo(2.0f)); - } - - @Test - public void testFuzzyQueryWithFields() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/fuzzy-with-fields.json"); - Query parsedQuery = queryParser.parse(query).query(); - assertThat(parsedQuery, instanceOf(FuzzyQuery.class)); - FuzzyQuery fuzzyQuery = (FuzzyQuery) parsedQuery; - assertThat(fuzzyQuery.getTerm(), equalTo(new Term("name.first", "sh"))); - 
assertThat(fuzzyQuery.getMaxEdits(), equalTo(Fuzziness.AUTO.asDistance("sh"))); - assertThat(fuzzyQuery.getPrefixLength(), equalTo(1)); - assertThat(fuzzyQuery.getBoost(), equalTo(2.0f)); - } - - @Test - public void testFuzzyQueryWithFields2() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/fuzzy-with-fields2.json"); - Query parsedQuery = queryParser.parse(query).query(); - assertThat(parsedQuery, instanceOf(NumericRangeQuery.class)); - NumericRangeQuery fuzzyQuery = (NumericRangeQuery) parsedQuery; - assertThat(fuzzyQuery.getMin().longValue(), equalTo(7l)); - assertThat(fuzzyQuery.getMax().longValue(), equalTo(17l)); - } - - @Test - public void testTermWithBoostQueryBuilder() throws IOException { - IndexQueryParserService queryParser = queryParser(); - - Query parsedQuery = queryParser.parse(termQuery("age", 34).boost(2.0f)).query(); - TermQuery fieldQuery = unwrapTermQuery(parsedQuery); - assertThat(fieldQuery.getTerm().bytes(), equalTo(indexedValueForSearch(34l))); - assertThat((double) parsedQuery.getBoost(), closeTo(2.0, 0.01)); - } - - private BytesRef indexedValueForSearch(long value) { - BytesRefBuilder bytesRef = new BytesRefBuilder(); - NumericUtils.longToPrefixCoded(value, 0, bytesRef); // 0 because of - // exact - // match - return bytesRef.get(); - } - - @Test - public void testTermWithBoostQuery() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/term-with-boost.json"); - Query parsedQuery = queryParser.parse(query).query(); - TermQuery fieldQuery = unwrapTermQuery(parsedQuery); - assertThat(fieldQuery.getTerm().bytes(), equalTo(indexedValueForSearch(34l))); - assertThat((double) parsedQuery.getBoost(), closeTo(2.0, 0.01)); - } - - @Test - public void testPrefixQueryBuilder() throws IOException { - IndexQueryParserService queryParser = queryParser(); 
- Query parsedQuery = queryParser.parse(prefixQuery("name.first", "sh")).query(); - assertThat(parsedQuery, instanceOf(PrefixQuery.class)); - PrefixQuery prefixQuery = (PrefixQuery) parsedQuery; - // since age is automatically registered in data, we encode it as numeric - assertThat(prefixQuery.getPrefix(), equalTo(new Term("name.first", "sh"))); - } - - @Test - public void testPrefixQuery() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/prefix.json"); - Query parsedQuery = queryParser.parse(query).query(); - assertThat(parsedQuery, instanceOf(PrefixQuery.class)); - PrefixQuery prefixQuery = (PrefixQuery) parsedQuery; - // since age is automatically registered in data, we encode it as numeric - assertThat(prefixQuery.getPrefix(), equalTo(new Term("name.first", "sh"))); - } - - @Test - public void testPrefixBoostQuery() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/prefix-boost.json"); - Query parsedQuery = queryParser.parse(query).query(); - assertThat(parsedQuery, instanceOf(PrefixQuery.class)); - PrefixQuery prefixQuery = (PrefixQuery) parsedQuery; - // since age is automatically registered in data, we encode it as numeric - assertThat(prefixQuery.getPrefix(), equalTo(new Term("name.first", "sh"))); - assertThat((double) prefixQuery.getBoost(), closeTo(1.2, 0.00001)); - } - - @Test - public void testPrefixQueryBoostQueryBuilder() throws IOException { - IndexQueryParserService queryParser = queryParser(); - Query parsedQuery = queryParser.parse(prefixQuery("name.first", "sh").boost(2.0f)).query(); - assertThat(parsedQuery, instanceOf(PrefixQuery.class)); - PrefixQuery prefixQuery = (PrefixQuery) parsedQuery; - assertThat(prefixQuery.getPrefix(), equalTo(new Term("name.first", "sh"))); - assertThat((double) prefixQuery.getBoost(), closeTo(2.0, 0.01)); - } - - 
@Test - public void testPrefixQueryBoostQuery() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/prefix-with-boost.json"); - Query parsedQuery = queryParser.parse(query).query(); - assertThat(parsedQuery, instanceOf(PrefixQuery.class)); - PrefixQuery prefixQuery = (PrefixQuery) parsedQuery; - assertThat(prefixQuery.getPrefix(), equalTo(new Term("name.first", "sh"))); - assertThat((double) prefixQuery.getBoost(), closeTo(2.0, 0.01)); - } - - @Test - public void testPrefixQueryWithUnknownField() throws IOException { - IndexQueryParserService queryParser = queryParser(); - Query parsedQuery = queryParser.parse(prefixQuery("unknown", "sh")).query(); - assertThat(parsedQuery, instanceOf(PrefixQuery.class)); - PrefixQuery prefixQuery = (PrefixQuery) parsedQuery; - assertThat(prefixQuery.getPrefix(), equalTo(new Term("unknown", "sh"))); - assertThat(prefixQuery.getRewriteMethod(), notNullValue()); - } - - @Test - public void testRegexpQueryBuilder() throws IOException { - IndexQueryParserService queryParser = queryParser(); - Query parsedQuery = queryParser.parse(regexpQuery("name.first", "s.*y")).query(); - assertThat(parsedQuery, instanceOf(RegexpQuery.class)); - RegexpQuery regexpQuery = (RegexpQuery) parsedQuery; - assertThat(regexpQuery.getField(), equalTo("name.first")); - } - - @Test - public void testRegexpQuery() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/regexp.json"); - Query parsedQuery = queryParser.parse(query).query(); - assertThat(parsedQuery, instanceOf(RegexpQuery.class)); - RegexpQuery regexpQuery = (RegexpQuery) parsedQuery; - assertThat(regexpQuery.getField(), equalTo("name.first")); - } - - @Test - public void testRegexpQueryWithMaxDeterminizedStates() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = 
copyToStringFromClasspath("/org/elasticsearch/index/query/regexp-max-determinized-states.json"); - Query parsedQuery = queryParser.parse(query).query(); - assertThat(parsedQuery, instanceOf(RegexpQuery.class)); - RegexpQuery regexpQuery = (RegexpQuery) parsedQuery; - assertThat(regexpQuery.getField(), equalTo("name.first")); - } - - @Test - public void testRegexpBoostQuery() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/regexp-boost.json"); - Query parsedQuery = queryParser.parse(query).query(); - assertThat(parsedQuery, instanceOf(RegexpQuery.class)); - RegexpQuery regexpQuery = (RegexpQuery) parsedQuery; - assertThat(regexpQuery.getField(), equalTo("name.first")); - assertThat(regexpQuery.getBoost(), equalTo(1.2f)); - } - - @Test - public void testWildcardQueryBuilder() throws IOException { - IndexQueryParserService queryParser = queryParser(); - Query parsedQuery = queryParser.parse(wildcardQuery("name.first", "sh*")).query(); - assertThat(parsedQuery, instanceOf(WildcardQuery.class)); - WildcardQuery wildcardQuery = (WildcardQuery) parsedQuery; - assertThat(wildcardQuery.getTerm(), equalTo(new Term("name.first", "sh*"))); - } - - @Test - public void testWildcardQuery() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/wildcard.json"); - Query parsedQuery = queryParser.parse(query).query(); - assertThat(parsedQuery, instanceOf(WildcardQuery.class)); - WildcardQuery wildcardQuery = (WildcardQuery) parsedQuery; - assertThat(wildcardQuery.getTerm(), equalTo(new Term("name.first", "sh*"))); - } - - @Test - public void testWildcardBoostQuery() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/wildcard-boost.json"); - Query parsedQuery = 
queryParser.parse(query).query(); - assertThat(parsedQuery, instanceOf(WildcardQuery.class)); - WildcardQuery wildcardQuery = (WildcardQuery) parsedQuery; - assertThat(wildcardQuery.getTerm(), equalTo(new Term("name.first", "sh*"))); - assertThat((double) wildcardQuery.getBoost(), closeTo(1.2, 0.00001)); - } - - @Test - public void testRangeQueryBuilder() throws IOException { - IndexQueryParserService queryParser = queryParser(); - Query parsedQuery = queryParser.parse(rangeQuery("age").from(23).to(54).includeLower(true).includeUpper(false)).query(); - // since age is automatically registered in data, we encode it as numeric - assertThat(parsedQuery, instanceOf(NumericRangeQuery.class)); - NumericRangeQuery rangeQuery = (NumericRangeQuery) parsedQuery; - assertThat(rangeQuery.getField(), equalTo("age")); - assertThat(rangeQuery.getMin().intValue(), equalTo(23)); - assertThat(rangeQuery.getMax().intValue(), equalTo(54)); - assertThat(rangeQuery.includesMin(), equalTo(true)); - assertThat(rangeQuery.includesMax(), equalTo(false)); - } - - @Test - public void testRangeQuery() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/range.json"); - Query parsedQuery = queryParser.parse(query).query(); - // since age is automatically registered in data, we encode it as numeric - assertThat(parsedQuery, instanceOf(NumericRangeQuery.class)); - NumericRangeQuery rangeQuery = (NumericRangeQuery) parsedQuery; - assertThat(rangeQuery.getField(), equalTo("age")); - assertThat(rangeQuery.getMin().intValue(), equalTo(23)); - assertThat(rangeQuery.getMax().intValue(), equalTo(54)); - assertThat(rangeQuery.includesMin(), equalTo(true)); - assertThat(rangeQuery.includesMax(), equalTo(false)); - } - - @Test - public void testRange2Query() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = 
copyToStringFromClasspath("/org/elasticsearch/index/query/range2.json"); - Query parsedQuery = queryParser.parse(query).query(); - // since age is automatically registered in data, we encode it as numeric - assertThat(parsedQuery, instanceOf(NumericRangeQuery.class)); - NumericRangeQuery rangeQuery = (NumericRangeQuery) parsedQuery; - assertThat(rangeQuery.getField(), equalTo("age")); - assertThat(rangeQuery.getMin().intValue(), equalTo(23)); - assertThat(rangeQuery.getMax().intValue(), equalTo(54)); - assertThat(rangeQuery.includesMin(), equalTo(true)); - assertThat(rangeQuery.includesMax(), equalTo(false)); - } - - @Test - public void testNotFilteredQueryBuilder() throws IOException { - IndexQueryParserService queryParser = queryParser(); - Query parsedQuery = queryParser.parse(notQuery(termQuery("name.first", "shay1"))).query(); - assertEquals(Queries.not(new TermQuery(new Term("name.first", "shay1"))), parsedQuery); - } - - @Test - public void testNotFilteredQuery() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/not-filter.json"); - Query parsedQuery = queryParser.parse(query).query(); - Query expected = - Queries.not(new TermQuery(new Term("name.first", "shay1"))); - assertEquals(expected, parsedQuery); - } - - @Test - public void testNotFilteredQuery2() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/not-filter2.json"); - Query parsedQuery = queryParser.parse(query).query(); - Query expected = Queries.not(new TermQuery(new Term("name.first", "shay1"))); - assertEquals(expected, parsedQuery); - } - - @Test - public void testNotFilteredQuery3() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/not-filter3.json"); - Query parsedQuery = 
queryParser.parse(query).query(); - Query expected = Queries.not(new TermQuery(new Term("name.first", "shay1"))); - assertEquals(expected, parsedQuery); - } - - @Test - public void testBoostingQueryBuilder() throws IOException { - IndexQueryParserService queryParser = queryParser(); - Query parsedQuery = queryParser.parse(boostingQuery().positive(termQuery("field1", "value1")).negative(termQuery("field1", "value2")).negativeBoost(0.2f)).query(); - assertThat(parsedQuery, instanceOf(BoostingQuery.class)); - } - - @Test - public void testBoostingQuery() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/boosting-query.json"); - Query parsedQuery = queryParser.parse(query).query(); - assertThat(parsedQuery, instanceOf(BoostingQuery.class)); - } - - @Test - public void testQueryStringFuzzyNumeric() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/query2.json"); - Query parsedQuery = queryParser.parse(query).query(); - assertThat(parsedQuery, instanceOf(NumericRangeQuery.class)); - NumericRangeQuery fuzzyQuery = (NumericRangeQuery) parsedQuery; - assertThat(fuzzyQuery.getMin().longValue(), equalTo(12l)); - assertThat(fuzzyQuery.getMax().longValue(), equalTo(12l)); - } - - @Test - public void testBoolQueryBuilder() throws IOException { - IndexQueryParserService queryParser = queryParser(); - Query parsedQuery = queryParser.parse(boolQuery().must(termQuery("content", "test1")).must(termQuery("content", "test4")).mustNot(termQuery("content", "test2")).should(termQuery("content", "test3"))).query(); - assertThat(parsedQuery, instanceOf(BooleanQuery.class)); - BooleanQuery booleanQuery = (BooleanQuery) parsedQuery; - BooleanClause[] clauses = booleanQuery.getClauses(); - - assertThat(clauses.length, equalTo(4)); - - assertThat(((TermQuery) clauses[0].getQuery()).getTerm(), 
equalTo(new Term("content", "test1"))); - assertThat(clauses[0].getOccur(), equalTo(BooleanClause.Occur.MUST)); - - assertThat(((TermQuery) clauses[1].getQuery()).getTerm(), equalTo(new Term("content", "test4"))); - assertThat(clauses[1].getOccur(), equalTo(BooleanClause.Occur.MUST)); - - assertThat(((TermQuery) clauses[2].getQuery()).getTerm(), equalTo(new Term("content", "test2"))); - assertThat(clauses[2].getOccur(), equalTo(BooleanClause.Occur.MUST_NOT)); - - assertThat(((TermQuery) clauses[3].getQuery()).getTerm(), equalTo(new Term("content", "test3"))); - assertThat(clauses[3].getOccur(), equalTo(BooleanClause.Occur.SHOULD)); - } - - @Test - public void testBoolQuery() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/bool.json"); - Query parsedQuery = queryParser.parse(query).query(); - assertThat(parsedQuery, instanceOf(BooleanQuery.class)); - BooleanQuery booleanQuery = (BooleanQuery) parsedQuery; - BooleanClause[] clauses = booleanQuery.getClauses(); - - assertThat(clauses.length, equalTo(4)); - - assertThat(((TermQuery) clauses[0].getQuery()).getTerm(), equalTo(new Term("content", "test1"))); - assertThat(clauses[0].getOccur(), equalTo(BooleanClause.Occur.MUST)); - - assertThat(((TermQuery) clauses[1].getQuery()).getTerm(), equalTo(new Term("content", "test4"))); - assertThat(clauses[1].getOccur(), equalTo(BooleanClause.Occur.MUST)); - - assertThat(((TermQuery) clauses[2].getQuery()).getTerm(), equalTo(new Term("content", "test2"))); - assertThat(clauses[2].getOccur(), equalTo(BooleanClause.Occur.MUST_NOT)); - - assertThat(((TermQuery) clauses[3].getQuery()).getTerm(), equalTo(new Term("content", "test3"))); - assertThat(clauses[3].getOccur(), equalTo(BooleanClause.Occur.SHOULD)); - } - - @Test - public void testTermsQueryBuilder() throws IOException { - IndexQueryParserService queryParser = queryParser(); - Query parsedQuery = 
queryParser.parse(termsQuery("name.first", Arrays.asList("shay", "test"))).query(); - assertThat(parsedQuery, instanceOf(BooleanQuery.class)); - BooleanQuery booleanQuery = (BooleanQuery) parsedQuery; - BooleanClause[] clauses = booleanQuery.getClauses(); - - assertThat(clauses.length, equalTo(2)); - - assertThat(((TermQuery) clauses[0].getQuery()).getTerm(), equalTo(new Term("name.first", "shay"))); - assertThat(clauses[0].getOccur(), equalTo(BooleanClause.Occur.SHOULD)); - - assertThat(((TermQuery) clauses[1].getQuery()).getTerm(), equalTo(new Term("name.first", "test"))); - assertThat(clauses[1].getOccur(), equalTo(BooleanClause.Occur.SHOULD)); - } - - @Test - public void testTermsQuery() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/terms-query.json"); - Query parsedQuery = queryParser.parse(query).query(); - assertThat(parsedQuery, instanceOf(BooleanQuery.class)); - BooleanQuery booleanQuery = (BooleanQuery) parsedQuery; - BooleanClause[] clauses = booleanQuery.getClauses(); - - assertThat(clauses.length, equalTo(2)); - - assertThat(((TermQuery) clauses[0].getQuery()).getTerm(), equalTo(new Term("name.first", "shay"))); - assertThat(clauses[0].getOccur(), equalTo(BooleanClause.Occur.SHOULD)); - - assertThat(((TermQuery) clauses[1].getQuery()).getTerm(), equalTo(new Term("name.first", "test"))); - assertThat(clauses[1].getOccur(), equalTo(BooleanClause.Occur.SHOULD)); - - assertFalse("terms query disable_coord disabled by default", booleanQuery.isCoordDisabled()); - } - - @Test - public void testTermsQueryOptions() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/terms-query-options.json"); - Query parsedQuery = queryParser.parse(query).query(); - assertThat(parsedQuery, instanceOf(BooleanQuery.class)); - BooleanQuery booleanQuery = (BooleanQuery) 
parsedQuery; - BooleanClause[] clauses = booleanQuery.getClauses(); - - assertThat(clauses.length, equalTo(3)); - - assertThat(((TermQuery) clauses[0].getQuery()).getTerm(), equalTo(new Term("name.first", "shay"))); - assertThat(clauses[0].getOccur(), equalTo(BooleanClause.Occur.SHOULD)); - - assertThat(((TermQuery) clauses[1].getQuery()).getTerm(), equalTo(new Term("name.first", "test"))); - assertThat(clauses[1].getOccur(), equalTo(BooleanClause.Occur.SHOULD)); - - assertThat(((TermQuery) clauses[2].getQuery()).getTerm(), equalTo(new Term("name.first", "elasticsearch"))); - assertThat(clauses[2].getOccur(), equalTo(BooleanClause.Occur.SHOULD)); - - assertTrue("terms query disable_coord option mismatch", booleanQuery.isCoordDisabled()); - assertThat(booleanQuery.getBoost(), equalTo(2.0f)); - assertThat(booleanQuery.getMinimumNumberShouldMatch(), equalTo(2)); - } - - @Test - public void testTermsQueryWithMultipleFields() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = XContentFactory.jsonBuilder().startObject() - .startObject("terms").array("foo", 123).array("bar", 456).endObject() - .endObject().string(); - try { - queryParser.parse(query).query(); - fail(); - } catch (ParsingException ex) { - assertThat(ex.getMessage(), equalTo("[terms] query does not support multiple fields")); - } - } - - @Test - public void testTermsFilterWithMultipleFields() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = XContentFactory.jsonBuilder().startObject() - .startObject("terms").array("foo", 123).array("bar", 456) - .endObject().string(); - try { - queryParser.parse(query).query(); - fail(); - } catch (ParsingException ex) { - assertThat(ex.getMessage(), equalTo("[terms] query does not support multiple fields")); - } - } - - - - @Test - public void testInQuery() throws IOException { - IndexQueryParserService queryParser = queryParser(); - Query parsedQuery = 
queryParser.parse(termsQuery("name.first", Arrays.asList("test1", "test2", "test3"))).query(); - assertThat(parsedQuery, instanceOf(BooleanQuery.class)); - BooleanQuery booleanQuery = (BooleanQuery) parsedQuery; - BooleanClause[] clauses = booleanQuery.getClauses(); - - assertThat(clauses.length, equalTo(3)); - - assertThat(((TermQuery) clauses[0].getQuery()).getTerm(), equalTo(new Term("name.first", "test1"))); - assertThat(clauses[0].getOccur(), equalTo(BooleanClause.Occur.SHOULD)); - - assertThat(((TermQuery) clauses[1].getQuery()).getTerm(), equalTo(new Term("name.first", "test2"))); - assertThat(clauses[1].getOccur(), equalTo(BooleanClause.Occur.SHOULD)); - - assertThat(((TermQuery) clauses[2].getQuery()).getTerm(), equalTo(new Term("name.first", "test3"))); - assertThat(clauses[2].getOccur(), equalTo(BooleanClause.Occur.SHOULD)); - } - - private void assertQueryParsingFailureDueToMultipleTermsInTermFilter(String query) throws IOException { - IndexQueryParserService queryParser = queryParser(); - try { - queryParser.parse(query); - fail("Expected Query Parsing Exception but did not happen"); - } catch (ParsingException e) { - assertThat(e.getMessage(), containsString("[term] query does not support different field names, use [bool] query instead")); - } - } - - @Test - public void testTermsFilterQueryBuilder() throws Exception { - IndexQueryParserService queryParser = queryParser(); - Query parsedQuery = queryParser.parse(constantScoreQuery(termsQuery("name.last", "banon", "kimchy"))).query(); - Query expected = new ConstantScoreQuery(new TermsQuery("name.last", new BytesRef("banon"), new BytesRef("kimchy"))); - assertEquals(expected, parsedQuery); - } - - - @Test - public void testTermsFilterQuery() throws Exception { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/terms-filter.json"); - Query parsedQuery = queryParser.parse(query).query(); - Query expected = new 
ConstantScoreQuery(new TermsQuery("name.last", new BytesRef("banon"), new BytesRef("kimchy"))); - assertEquals(expected, parsedQuery); - } - - @Test - public void testTermsWithNameFilterQuery() throws Exception { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/terms-filter-named.json"); - ParsedQuery parsedQuery = queryParser.parse(query); - assertThat(parsedQuery.namedFilters().containsKey("test"), equalTo(true)); - Query expected = new ConstantScoreQuery(new TermsQuery("name.last", new BytesRef("banon"), new BytesRef("kimchy"))); - assertEquals(expected, parsedQuery.query()); - } - - @Test - public void testConstantScoreQueryBuilder() throws IOException { - IndexQueryParserService queryParser = queryParser(); - Query parsedQuery = queryParser.parse(constantScoreQuery(termQuery("name.last", "banon"))).query(); - assertThat(parsedQuery, instanceOf(ConstantScoreQuery.class)); - ConstantScoreQuery constantScoreQuery = (ConstantScoreQuery) parsedQuery; - assertThat(getTerm(constantScoreQuery.getQuery()), equalTo(new Term("name.last", "banon"))); - } - - @Test - public void testConstantScoreQuery() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/constantScore-query.json"); - Query parsedQuery = queryParser.parse(query).query(); - assertThat(parsedQuery, instanceOf(ConstantScoreQuery.class)); - ConstantScoreQuery constantScoreQuery = (ConstantScoreQuery) parsedQuery; - assertThat(getTerm(constantScoreQuery.getQuery()), equalTo(new Term("name.last", "banon"))); - } - - @Test - public void testCustomWeightFactorQueryBuilder_withFunctionScore() throws IOException { - IndexQueryParserService queryParser = queryParser(); - Query parsedQuery = queryParser.parse(functionScoreQuery(termQuery("name.last", "banon"), ScoreFunctionBuilders.weightFactorFunction(1.3f))).query(); - 
assertThat(parsedQuery, instanceOf(FunctionScoreQuery.class)); - FunctionScoreQuery functionScoreQuery = (FunctionScoreQuery) parsedQuery; - assertThat(((TermQuery) functionScoreQuery.getSubQuery()).getTerm(), equalTo(new Term("name.last", "banon"))); - assertThat((double) ((WeightFactorFunction) functionScoreQuery.getFunction()).getWeight(), closeTo(1.3, 0.001)); - } - - @Test - public void testCustomWeightFactorQueryBuilder_withFunctionScoreWithoutQueryGiven() throws IOException { - IndexQueryParserService queryParser = queryParser(); - Query parsedQuery = queryParser.parse(functionScoreQuery(ScoreFunctionBuilders.weightFactorFunction(1.3f))).query(); - assertThat(parsedQuery, instanceOf(FunctionScoreQuery.class)); - FunctionScoreQuery functionScoreQuery = (FunctionScoreQuery) parsedQuery; - assertThat(functionScoreQuery.getSubQuery() instanceof MatchAllDocsQuery, equalTo(true)); - assertThat((double) ((WeightFactorFunction) functionScoreQuery.getFunction()).getWeight(), closeTo(1.3, 0.001)); - } - - @Test - public void testSpanTermQueryBuilder() throws IOException { - IndexQueryParserService queryParser = queryParser(); - Query parsedQuery = queryParser.parse(spanTermQuery("age", 34)).query(); - assertThat(parsedQuery, instanceOf(SpanTermQuery.class)); - SpanTermQuery termQuery = (SpanTermQuery) parsedQuery; - // since age is automatically registered in data, we encode it as numeric - assertThat(termQuery.getTerm(), equalTo(new Term("age", longToPrefixCoded(34, 0)))); - } - - @Test - public void testSpanTermQuery() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/spanTerm.json"); - Query parsedQuery = queryParser.parse(query).query(); - assertThat(parsedQuery, instanceOf(SpanTermQuery.class)); - SpanTermQuery termQuery = (SpanTermQuery) parsedQuery; - // since age is automatically registered in data, we encode it as numeric - assertThat(termQuery.getTerm(), 
equalTo(new Term("age", longToPrefixCoded(34, 0)))); - } - - @Test - public void testSpanNotQueryBuilder() throws IOException { - IndexQueryParserService queryParser = queryParser(); - Query parsedQuery = queryParser.parse(spanNotQuery().include(spanTermQuery("age", 34)).exclude(spanTermQuery("age", 35))).query(); - assertThat(parsedQuery, instanceOf(SpanNotQuery.class)); - SpanNotQuery spanNotQuery = (SpanNotQuery) parsedQuery; - // since age is automatically registered in data, we encode it as numeric - assertThat(((SpanTermQuery) spanNotQuery.getInclude()).getTerm(), equalTo(new Term("age", longToPrefixCoded(34, 0)))); - assertThat(((SpanTermQuery) spanNotQuery.getExclude()).getTerm(), equalTo(new Term("age", longToPrefixCoded(35, 0)))); - } - - @Test - public void testSpanNotQuery() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/spanNot.json"); - Query parsedQuery = queryParser.parse(query).query(); - assertThat(parsedQuery, instanceOf(SpanNotQuery.class)); - SpanNotQuery spanNotQuery = (SpanNotQuery) parsedQuery; - // since age is automatically registered in data, we encode it as numeric - assertThat(((SpanTermQuery) spanNotQuery.getInclude()).getTerm(), equalTo(new Term("age", longToPrefixCoded(34, 0)))); - assertThat(((SpanTermQuery) spanNotQuery.getExclude()).getTerm(), equalTo(new Term("age", longToPrefixCoded(35, 0)))); - } - - @Test - public void testSpanWithinQueryBuilder() throws IOException { - IndexQueryParserService queryParser = queryParser(); - SpanTermQuery big = new SpanTermQuery(new Term("age", longToPrefixCoded(34, 0))); - big.setBoost(2); - SpanTermQuery little = new SpanTermQuery(new Term("age", longToPrefixCoded(35, 0))); - little.setBoost(3); - Query expectedQuery = new SpanWithinQuery(big, little); - - SpanWithinQueryBuilder spanWithinQueryBuilder = spanWithinQuery() - .big(spanTermQuery("age", 34).boost(2)) - 
.little(spanTermQuery("age", 35).boost(3)); - Query actualQuery = queryParser.parse(spanWithinQueryBuilder).query(); - assertEquals(expectedQuery, actualQuery); - - float boost = randomFloat(); - expectedQuery.setBoost(boost); - spanWithinQueryBuilder.boost(boost); - actualQuery = queryParser.parse(spanWithinQueryBuilder).query(); - assertEquals(expectedQuery, actualQuery); - } - - @Test - public void testSpanWithinQueryParser() throws IOException { - IndexQueryParserService queryParser = queryParser(); - Query expectedQuery = new SpanWithinQuery(new SpanTermQuery(new Term("age", longToPrefixCoded(34, 0))), - new SpanTermQuery(new Term("age", longToPrefixCoded(35, 0)))); - String queryText = copyToStringFromClasspath("/org/elasticsearch/index/query/spanWithin.json"); - Query actualQuery = queryParser.parse(queryText).query(); - assertEquals(expectedQuery, actualQuery); - } - - @Test - public void testSpanContainingQueryBuilder() throws IOException { - IndexQueryParserService queryParser = queryParser(); - SpanTermQuery big = new SpanTermQuery(new Term("age", longToPrefixCoded(34, 0))); - big.setBoost(2); - SpanTermQuery little = new SpanTermQuery(new Term("age", longToPrefixCoded(35, 0))); - little.setBoost(3); - Query expectedQuery = new SpanContainingQuery(big, little); - - SpanContainingQueryBuilder spanContainingQueryBuilder = spanContainingQuery() - .big(spanTermQuery("age", 34).boost(2)) - .little(spanTermQuery("age", 35).boost(3)); - Query actualQuery = queryParser.parse(spanContainingQueryBuilder).query(); - assertEquals(expectedQuery, actualQuery); - - float boost = randomFloat(); - expectedQuery.setBoost(boost); - spanContainingQueryBuilder.boost(boost); - actualQuery = queryParser.parse(spanContainingQueryBuilder).query(); - assertEquals(expectedQuery, actualQuery); - } - - @Test - public void testSpanContainingQueryParser() throws IOException { - IndexQueryParserService queryParser = queryParser(); - Query expectedQuery = new SpanContainingQuery(new 
SpanTermQuery(new Term("age", longToPrefixCoded(34, 0))), - new SpanTermQuery(new Term("age", longToPrefixCoded(35, 0)))); - String queryText = copyToStringFromClasspath("/org/elasticsearch/index/query/spanContaining.json"); - Query actualQuery = queryParser.parse(queryText).query(); - assertEquals(expectedQuery, actualQuery); - } - - @Test - public void testSpanFirstQueryBuilder() throws IOException { - IndexQueryParserService queryParser = queryParser(); - Query parsedQuery = queryParser.parse(spanFirstQuery(spanTermQuery("age", 34), 12)).query(); - assertThat(parsedQuery, instanceOf(SpanFirstQuery.class)); - SpanFirstQuery spanFirstQuery = (SpanFirstQuery) parsedQuery; - // since age is automatically registered in data, we encode it as numeric - assertThat(((SpanTermQuery) spanFirstQuery.getMatch()).getTerm(), equalTo(new Term("age", longToPrefixCoded(34, 0)))); - assertThat(spanFirstQuery.getEnd(), equalTo(12)); - } - - @Test - public void testSpanFirstQuery() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/spanFirst.json"); - Query parsedQuery = queryParser.parse(query).query(); - assertThat(parsedQuery, instanceOf(SpanFirstQuery.class)); - SpanFirstQuery spanFirstQuery = (SpanFirstQuery) parsedQuery; - // since age is automatically registered in data, we encode it as numeric - assertThat(((SpanTermQuery) spanFirstQuery.getMatch()).getTerm(), equalTo(new Term("age", longToPrefixCoded(34, 0)))); - assertThat(spanFirstQuery.getEnd(), equalTo(12)); - } - - @Test - public void testSpanNearQueryBuilder() throws IOException { - IndexQueryParserService queryParser = queryParser(); - Query parsedQuery = queryParser.parse(spanNearQuery().clause(spanTermQuery("age", 34)).clause(spanTermQuery("age", 35)).clause(spanTermQuery("age", 36)).slop(12).inOrder(false).collectPayloads(false)).query(); - assertThat(parsedQuery, instanceOf(SpanNearQuery.class)); - SpanNearQuery 
spanNearQuery = (SpanNearQuery) parsedQuery; - assertThat(spanNearQuery.getClauses().length, equalTo(3)); - assertThat(((SpanTermQuery) spanNearQuery.getClauses()[0]).getTerm(), equalTo(new Term("age", longToPrefixCoded(34, 0)))); - assertThat(((SpanTermQuery) spanNearQuery.getClauses()[1]).getTerm(), equalTo(new Term("age", longToPrefixCoded(35, 0)))); - assertThat(((SpanTermQuery) spanNearQuery.getClauses()[2]).getTerm(), equalTo(new Term("age", longToPrefixCoded(36, 0)))); - assertThat(spanNearQuery.isInOrder(), equalTo(false)); - } - - @Test - public void testSpanNearQuery() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/spanNear.json"); - Query parsedQuery = queryParser.parse(query).query(); - assertThat(parsedQuery, instanceOf(SpanNearQuery.class)); - SpanNearQuery spanNearQuery = (SpanNearQuery) parsedQuery; - assertThat(spanNearQuery.getClauses().length, equalTo(3)); - assertThat(((SpanTermQuery) spanNearQuery.getClauses()[0]).getTerm(), equalTo(new Term("age", longToPrefixCoded(34, 0)))); - assertThat(((SpanTermQuery) spanNearQuery.getClauses()[1]).getTerm(), equalTo(new Term("age", longToPrefixCoded(35, 0)))); - assertThat(((SpanTermQuery) spanNearQuery.getClauses()[2]).getTerm(), equalTo(new Term("age", longToPrefixCoded(36, 0)))); - assertThat(spanNearQuery.isInOrder(), equalTo(false)); - } - - @Test - public void testFieldMaskingSpanQuery() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/spanFieldMaskingTerm.json"); - Query parsedQuery = queryParser.parse(query).query(); - assertThat(parsedQuery, instanceOf(SpanNearQuery.class)); - SpanNearQuery spanNearQuery = (SpanNearQuery) parsedQuery; - assertThat(spanNearQuery.getClauses().length, equalTo(3)); - assertThat(((SpanTermQuery) spanNearQuery.getClauses()[0]).getTerm(), equalTo(new Term("age", 
longToPrefixCoded(34, 0)))); - assertThat(((SpanTermQuery) spanNearQuery.getClauses()[1]).getTerm(), equalTo(new Term("age", longToPrefixCoded(35, 0)))); - assertThat(((SpanTermQuery) ((FieldMaskingSpanQuery) spanNearQuery.getClauses()[2]).getMaskedQuery()).getTerm(), equalTo(new Term("age_1", "36"))); - assertThat(spanNearQuery.isInOrder(), equalTo(false)); - } - - - @Test - public void testSpanOrQueryBuilder() throws IOException { - IndexQueryParserService queryParser = queryParser(); - Query parsedQuery = queryParser.parse(spanOrQuery().clause(spanTermQuery("age", 34)).clause(spanTermQuery("age", 35)).clause(spanTermQuery("age", 36))).query(); - assertThat(parsedQuery, instanceOf(SpanOrQuery.class)); - SpanOrQuery spanOrQuery = (SpanOrQuery) parsedQuery; - assertThat(spanOrQuery.getClauses().length, equalTo(3)); - assertThat(((SpanTermQuery) spanOrQuery.getClauses()[0]).getTerm(), equalTo(new Term("age", longToPrefixCoded(34, 0)))); - assertThat(((SpanTermQuery) spanOrQuery.getClauses()[1]).getTerm(), equalTo(new Term("age", longToPrefixCoded(35, 0)))); - assertThat(((SpanTermQuery) spanOrQuery.getClauses()[2]).getTerm(), equalTo(new Term("age", longToPrefixCoded(36, 0)))); - } - - @Test - public void testSpanOrQuery() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/spanOr.json"); - Query parsedQuery = queryParser.parse(query).query(); - assertThat(parsedQuery, instanceOf(SpanOrQuery.class)); - SpanOrQuery spanOrQuery = (SpanOrQuery) parsedQuery; - assertThat(spanOrQuery.getClauses().length, equalTo(3)); - assertThat(((SpanTermQuery) spanOrQuery.getClauses()[0]).getTerm(), equalTo(new Term("age", longToPrefixCoded(34, 0)))); - assertThat(((SpanTermQuery) spanOrQuery.getClauses()[1]).getTerm(), equalTo(new Term("age", longToPrefixCoded(35, 0)))); - assertThat(((SpanTermQuery) spanOrQuery.getClauses()[2]).getTerm(), equalTo(new Term("age", longToPrefixCoded(36, 
0)))); - } - - @Test - public void testSpanOrQuery2() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/spanOr2.json"); - Query parsedQuery = queryParser.parse(query).query(); - assertThat(parsedQuery, instanceOf(SpanOrQuery.class)); - SpanOrQuery spanOrQuery = (SpanOrQuery) parsedQuery; - assertThat(spanOrQuery.getClauses().length, equalTo(3)); - assertThat(((SpanTermQuery) spanOrQuery.getClauses()[0]).getTerm(), equalTo(new Term("age", longToPrefixCoded(34, 0)))); - assertThat(((SpanTermQuery) spanOrQuery.getClauses()[1]).getTerm(), equalTo(new Term("age", longToPrefixCoded(35, 0)))); - assertThat(((SpanTermQuery) spanOrQuery.getClauses()[2]).getTerm(), equalTo(new Term("age", longToPrefixCoded(36, 0)))); - } - - @Test - public void testSpanMultiTermWildcardQuery() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/span-multi-term-wildcard.json"); - Query parsedQuery = queryParser.parse(query).query(); - assertThat(parsedQuery, instanceOf(SpanMultiTermQueryWrapper.class)); - WildcardQuery expectedWrapped = new WildcardQuery(new Term("user", "ki*y")); - expectedWrapped.setBoost(1.08f); - SpanMultiTermQueryWrapper wrapper = (SpanMultiTermQueryWrapper) parsedQuery; - assertThat(wrapper, equalTo(new SpanMultiTermQueryWrapper(expectedWrapped))); - } - - @Test - public void testSpanMultiTermPrefixQuery() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/span-multi-term-prefix.json"); - Query parsedQuery = queryParser.parse(query).query(); - assertThat(parsedQuery, instanceOf(SpanMultiTermQueryWrapper.class)); - PrefixQuery expectedWrapped = new PrefixQuery(new Term("user", "ki")); - expectedWrapped.setBoost(1.08f); - SpanMultiTermQueryWrapper wrapper = 
(SpanMultiTermQueryWrapper) parsedQuery; - assertThat(wrapper, equalTo(new SpanMultiTermQueryWrapper(expectedWrapped))); - } - - @Test - public void testSpanMultiTermFuzzyTermQuery() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/span-multi-term-fuzzy-term.json"); - Query parsedQuery = queryParser.parse(query).query(); - assertThat(parsedQuery, instanceOf(SpanMultiTermQueryWrapper.class)); - SpanMultiTermQueryWrapper wrapper = (SpanMultiTermQueryWrapper) parsedQuery; - assertThat(wrapper.getField(), equalTo("user")); - } - - @Test - public void testSpanMultiTermFuzzyRangeQuery() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/span-multi-term-fuzzy-range.json"); - Query parsedQuery = queryParser.parse(query).query(); - assertThat(parsedQuery, instanceOf(SpanMultiTermQueryWrapper.class)); - NumericRangeQuery expectedWrapped = NumericRangeQuery.newLongRange("age", NumberFieldMapper.Defaults.PRECISION_STEP_64_BIT, 7l, 17l, true, true); - expectedWrapped.setBoost(2.0f); - SpanMultiTermQueryWrapper wrapper = (SpanMultiTermQueryWrapper) parsedQuery; - assertThat(wrapper, equalTo(new SpanMultiTermQueryWrapper(expectedWrapped))); - } - - @Test - public void testSpanMultiTermNumericRangeQuery() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/span-multi-term-range-numeric.json"); - Query parsedQuery = queryParser.parse(query).query(); - assertThat(parsedQuery, instanceOf(SpanMultiTermQueryWrapper.class)); - NumericRangeQuery expectedWrapped = NumericRangeQuery.newLongRange("age", NumberFieldMapper.Defaults.PRECISION_STEP_64_BIT, 10l, 20l, true, false); - expectedWrapped.setBoost(2.0f); - SpanMultiTermQueryWrapper wrapper = (SpanMultiTermQueryWrapper) parsedQuery; - 
assertThat(wrapper, equalTo(new SpanMultiTermQueryWrapper(expectedWrapped))); - } - - @Test - public void testSpanMultiTermTermRangeQuery() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/span-multi-term-range-term.json"); - Query parsedQuery = queryParser.parse(query).query(); - assertThat(parsedQuery, instanceOf(SpanMultiTermQueryWrapper.class)); - TermRangeQuery expectedWrapped = TermRangeQuery.newStringRange("user", "alice", "bob", true, false); - expectedWrapped.setBoost(2.0f); - SpanMultiTermQueryWrapper wrapper = (SpanMultiTermQueryWrapper) parsedQuery; - assertThat(wrapper, equalTo(new SpanMultiTermQueryWrapper(expectedWrapped))); - } - - @Test - public void testMoreLikeThisBuilder() throws Exception { - IndexQueryParserService queryParser = queryParser(); - Query parsedQuery = queryParser.parse(moreLikeThisQuery("name.first", "name.last").likeText("something").minTermFreq(1).maxQueryTerms(12)).query(); - assertThat(parsedQuery, instanceOf(MoreLikeThisQuery.class)); - MoreLikeThisQuery mltQuery = (MoreLikeThisQuery) parsedQuery; - assertThat(mltQuery.getMoreLikeFields()[0], equalTo("name.first")); - assertThat(mltQuery.getLikeText(), equalTo("something")); - assertThat(mltQuery.getMinTermFrequency(), equalTo(1)); - assertThat(mltQuery.getMaxQueryTerms(), equalTo(12)); - } - - @Test - public void testMoreLikeThis() throws Exception { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/mlt.json"); - Query parsedQuery = queryParser.parse(query).query(); - assertThat(parsedQuery, instanceOf(MoreLikeThisQuery.class)); - MoreLikeThisQuery mltQuery = (MoreLikeThisQuery) parsedQuery; - assertThat(mltQuery.getMoreLikeFields()[0], equalTo("name.first")); - assertThat(mltQuery.getMoreLikeFields()[1], equalTo("name.last")); - assertThat(mltQuery.getLikeText(), equalTo("something")); - 
assertThat(mltQuery.getMinTermFrequency(), equalTo(1)); - assertThat(mltQuery.getMaxQueryTerms(), equalTo(12)); - } - - @Test - public void testMoreLikeThisIds() throws Exception { - MoreLikeThisQueryParser parser = (MoreLikeThisQueryParser) queryParser.queryParser("more_like_this"); - parser.setFetchService(new MockMoreLikeThisFetchService()); - - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/mlt-items.json"); - Query parsedQuery = queryParser.parse(query).query(); - assertThat(parsedQuery, instanceOf(BooleanQuery.class)); - BooleanQuery booleanQuery = (BooleanQuery) parsedQuery; - assertThat(booleanQuery.getClauses().length, is(1)); - - BooleanClause itemClause = booleanQuery.getClauses()[0]; - assertThat(itemClause.getOccur(), is(BooleanClause.Occur.SHOULD)); - assertThat(itemClause.getQuery(), instanceOf(MoreLikeThisQuery.class)); - MoreLikeThisQuery mltQuery = (MoreLikeThisQuery) itemClause.getQuery(); - - // check each Fields is for each item - for (int id = 1; id <= 4; id++) { - Fields fields = mltQuery.getLikeFields()[id - 1]; - assertThat(termsToString(fields.terms("name.first")), is(String.valueOf(id))); - assertThat(termsToString(fields.terms("name.last")), is(String.valueOf(id))); - } - } - - @Test - public void testMLTMinimumShouldMatch() throws Exception { - // setup for mocking fetching items - MoreLikeThisQueryParser parser = (MoreLikeThisQueryParser) queryParser.queryParser("more_like_this"); - parser.setFetchService(new MockMoreLikeThisFetchService()); - - // parsing the ES query - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/mlt-items.json"); - BooleanQuery parsedQuery = (BooleanQuery) queryParser.parse(query).query(); - - // get MLT query, other clause is for include/exclude items - MoreLikeThisQuery mltQuery = (MoreLikeThisQuery) parsedQuery.getClauses()[0].getQuery(); - - // all 
terms must match - mltQuery.setMinimumShouldMatch("100%"); - mltQuery.setMinWordLen(0); - mltQuery.setMinDocFreq(0); - - // one document has all values - MemoryIndex index = new MemoryIndex(); - index.addField("name.first", "apache lucene", new WhitespaceAnalyzer()); - index.addField("name.last", "1 2 3 4", new WhitespaceAnalyzer()); - - // two clauses, one for items and one for like_text if set - BooleanQuery luceneQuery = (BooleanQuery) mltQuery.rewrite(index.createSearcher().getIndexReader()); - BooleanClause[] clauses = luceneQuery.getClauses(); - - // check for items - int minNumberShouldMatch = ((BooleanQuery) (clauses[0].getQuery())).getMinimumNumberShouldMatch(); - assertThat(minNumberShouldMatch, is(4)); - - // and for like_text - minNumberShouldMatch = ((BooleanQuery) (clauses[1].getQuery())).getMinimumNumberShouldMatch(); - assertThat(minNumberShouldMatch, is(2)); - } - - private static class MockMoreLikeThisFetchService extends MoreLikeThisFetchService { - - public MockMoreLikeThisFetchService() { - super(null, Settings.Builder.EMPTY_SETTINGS); - } - - @Override - public MultiTermVectorsResponse fetchResponse(List items, List unlikeItems, SearchContext searchContext) throws IOException { - MultiTermVectorsItemResponse[] responses = new MultiTermVectorsItemResponse[items.size()]; - int i = 0; - for (Item item : items) { - TermVectorsResponse response = new TermVectorsResponse(item.index(), item.type(), item.id()); - response.setExists(true); - Fields generatedFields = generateFields(item.fields(), item.id()); - EnumSet flags = EnumSet.of(TermVectorsRequest.Flag.Positions, TermVectorsRequest.Flag.Offsets); - response.setFields(generatedFields, new HashSet(Arrays.asList(item.fields())), flags, generatedFields); - responses[i++] = new MultiTermVectorsItemResponse(response, null); - } - return new MultiTermVectorsResponse(responses); - } - } - - private static Fields generateFields(String[] fieldNames, String text) throws IOException { - MemoryIndex index = 
new MemoryIndex(); - for (String fieldName : fieldNames) { - index.addField(fieldName, text, new WhitespaceAnalyzer()); - } - return MultiFields.getFields(index.createSearcher().getIndexReader()); - } - - private static String termsToString(Terms terms) throws IOException { - String strings = ""; - TermsEnum termsEnum = terms.iterator(); - CharsRefBuilder spare = new CharsRefBuilder(); - BytesRef text; - while((text = termsEnum.next()) != null) { - spare.copyUTF8Bytes(text); - String term = spare.toString(); - strings += term; - } - return strings; - } - - @Test - public void testGeoDistanceRangeQueryNamed() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/geo_distance-named.json"); - ParsedQuery parsedQuery = queryParser.parse(query); - assertThat(parsedQuery.namedFilters().containsKey("test"), equalTo(true)); - GeoDistanceRangeQuery filter = (GeoDistanceRangeQuery) parsedQuery.query(); - assertThat(filter.fieldName(), equalTo("location")); - assertThat(filter.lat(), closeTo(40, 0.00001)); - assertThat(filter.lon(), closeTo(-70, 0.00001)); - assertThat(filter.minInclusiveDistance(), equalTo(Double.NEGATIVE_INFINITY)); - assertThat(filter.maxInclusiveDistance(), closeTo(DistanceUnit.DEFAULT.convert(12, DistanceUnit.MILES), 0.00001)); - } - - @Test - public void testGeoDistanceRangeQuery1() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/geo_distance1.json"); - Query parsedQuery = queryParser.parse(query).query(); - GeoDistanceRangeQuery filter = (GeoDistanceRangeQuery) parsedQuery; - assertThat(filter.fieldName(), equalTo("location")); - assertThat(filter.lat(), closeTo(40, 0.00001)); - assertThat(filter.lon(), closeTo(-70, 0.00001)); - assertThat(filter.minInclusiveDistance(), equalTo(Double.NEGATIVE_INFINITY)); - assertThat(filter.maxInclusiveDistance(), 
closeTo(DistanceUnit.DEFAULT.convert(12, DistanceUnit.MILES), 0.00001)); - } - - @Test - public void testGeoDistanceRangeQuery2() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/geo_distance2.json"); - Query parsedQuery = queryParser.parse(query).query(); - GeoDistanceRangeQuery filter = (GeoDistanceRangeQuery) parsedQuery; - assertThat(filter.fieldName(), equalTo("location")); - assertThat(filter.lat(), closeTo(40, 0.00001)); - assertThat(filter.lon(), closeTo(-70, 0.00001)); - assertThat(filter.minInclusiveDistance(), equalTo(Double.NEGATIVE_INFINITY)); - assertThat(filter.maxInclusiveDistance(), closeTo(DistanceUnit.DEFAULT.convert(12, DistanceUnit.MILES), 0.00001)); - } - - @Test - public void testGeoDistanceRangeQuery3() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/geo_distance3.json"); - Query parsedQuery = queryParser.parse(query).query(); - GeoDistanceRangeQuery filter = (GeoDistanceRangeQuery) parsedQuery; - assertThat(filter.fieldName(), equalTo("location")); - assertThat(filter.lat(), closeTo(40, 0.00001)); - assertThat(filter.lon(), closeTo(-70, 0.00001)); - assertThat(filter.minInclusiveDistance(), equalTo(Double.NEGATIVE_INFINITY)); - assertThat(filter.maxInclusiveDistance(), closeTo(DistanceUnit.DEFAULT.convert(12, DistanceUnit.MILES), 0.00001)); - } - - @Test - public void testGeoDistanceRangeQuery4() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/geo_distance4.json"); - Query parsedQuery = queryParser.parse(query).query(); - GeoDistanceRangeQuery filter = (GeoDistanceRangeQuery) parsedQuery; - assertThat(filter.fieldName(), equalTo("location")); - assertThat(filter.lat(), closeTo(40, 0.00001)); - assertThat(filter.lon(), closeTo(-70, 
0.00001)); - assertThat(filter.minInclusiveDistance(), equalTo(Double.NEGATIVE_INFINITY)); - assertThat(filter.maxInclusiveDistance(), closeTo(DistanceUnit.DEFAULT.convert(12, DistanceUnit.MILES), 0.00001)); - } - - @Test - public void testGeoDistanceRangeQuery5() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/geo_distance5.json"); - Query parsedQuery = queryParser.parse(query).query(); - GeoDistanceRangeQuery filter = (GeoDistanceRangeQuery) parsedQuery; - assertThat(filter.fieldName(), equalTo("location")); - assertThat(filter.lat(), closeTo(40, 0.00001)); - assertThat(filter.lon(), closeTo(-70, 0.00001)); - assertThat(filter.minInclusiveDistance(), equalTo(Double.NEGATIVE_INFINITY)); - assertThat(filter.maxInclusiveDistance(), closeTo(DistanceUnit.DEFAULT.convert(12, DistanceUnit.MILES), 0.00001)); - } - - @Test - public void testGeoDistanceRangeQuery6() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/geo_distance6.json"); - Query parsedQuery = queryParser.parse(query).query(); - GeoDistanceRangeQuery filter = (GeoDistanceRangeQuery) parsedQuery; - assertThat(filter.fieldName(), equalTo("location")); - assertThat(filter.lat(), closeTo(40, 0.00001)); - assertThat(filter.lon(), closeTo(-70, 0.00001)); - assertThat(filter.minInclusiveDistance(), equalTo(Double.NEGATIVE_INFINITY)); - assertThat(filter.maxInclusiveDistance(), closeTo(DistanceUnit.DEFAULT.convert(12, DistanceUnit.MILES), 0.00001)); - } - - @Test - public void testGeoDistanceRangeQuery7() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/geo_distance7.json"); - Query parsedQuery = queryParser.parse(query).query(); - GeoDistanceRangeQuery filter = (GeoDistanceRangeQuery) parsedQuery; - 
assertThat(filter.fieldName(), equalTo("location")); - assertThat(filter.lat(), closeTo(40, 0.00001)); - assertThat(filter.lon(), closeTo(-70, 0.00001)); - assertThat(filter.minInclusiveDistance(), equalTo(Double.NEGATIVE_INFINITY)); - assertThat(filter.maxInclusiveDistance(), closeTo(DistanceUnit.DEFAULT.convert(0.012, DistanceUnit.MILES), 0.00001)); - } - - @Test - public void testGeoDistanceRangeQuery8() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/geo_distance8.json"); - Query parsedQuery = queryParser.parse(query).query(); - GeoDistanceRangeQuery filter = (GeoDistanceRangeQuery) parsedQuery; - assertThat(filter.fieldName(), equalTo("location")); - assertThat(filter.lat(), closeTo(40, 0.00001)); - assertThat(filter.lon(), closeTo(-70, 0.00001)); - assertThat(filter.minInclusiveDistance(), equalTo(Double.NEGATIVE_INFINITY)); - assertThat(filter.maxInclusiveDistance(), closeTo(DistanceUnit.KILOMETERS.convert(12, DistanceUnit.MILES), 0.00001)); - } - - @Test - public void testGeoDistanceRangeQuery9() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/geo_distance9.json"); - Query parsedQuery = queryParser.parse(query).query(); - GeoDistanceRangeQuery filter = (GeoDistanceRangeQuery) parsedQuery; - assertThat(filter.fieldName(), equalTo("location")); - assertThat(filter.lat(), closeTo(40, 0.00001)); - assertThat(filter.lon(), closeTo(-70, 0.00001)); - assertThat(filter.minInclusiveDistance(), equalTo(Double.NEGATIVE_INFINITY)); - assertThat(filter.maxInclusiveDistance(), closeTo(DistanceUnit.DEFAULT.convert(12, DistanceUnit.MILES), 0.00001)); - } - - @Test - public void testGeoDistanceRangeQuery10() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = 
copyToStringFromClasspath("/org/elasticsearch/index/query/geo_distance10.json"); - Query parsedQuery = queryParser.parse(query).query(); - GeoDistanceRangeQuery filter = (GeoDistanceRangeQuery) parsedQuery; - assertThat(filter.fieldName(), equalTo("location")); - assertThat(filter.lat(), closeTo(40, 0.00001)); - assertThat(filter.lon(), closeTo(-70, 0.00001)); - assertThat(filter.minInclusiveDistance(), equalTo(Double.NEGATIVE_INFINITY)); - assertThat(filter.maxInclusiveDistance(), closeTo(DistanceUnit.DEFAULT.convert(12, DistanceUnit.MILES), 0.00001)); - } - - @Test - public void testGeoDistanceRangeQuery11() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/geo_distance11.json"); - Query parsedQuery = queryParser.parse(query).query(); - GeoDistanceRangeQuery filter = (GeoDistanceRangeQuery) parsedQuery; - assertThat(filter.fieldName(), equalTo("location")); - assertThat(filter.lat(), closeTo(40, 0.00001)); - assertThat(filter.lon(), closeTo(-70, 0.00001)); - assertThat(filter.minInclusiveDistance(), equalTo(Double.NEGATIVE_INFINITY)); - assertThat(filter.maxInclusiveDistance(), closeTo(DistanceUnit.DEFAULT.convert(12, DistanceUnit.MILES), 0.00001)); - } - - @Test - public void testGeoDistanceRangeQuery12() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/geo_distance12.json"); - Query parsedQuery = queryParser.parse(query).query(); - GeoDistanceRangeQuery filter = (GeoDistanceRangeQuery) parsedQuery; - assertThat(filter.fieldName(), equalTo("location")); - assertThat(filter.lat(), closeTo(40, 0.00001)); - assertThat(filter.lon(), closeTo(-70, 0.00001)); - assertThat(filter.minInclusiveDistance(), equalTo(Double.NEGATIVE_INFINITY)); - assertThat(filter.maxInclusiveDistance(), closeTo(DistanceUnit.DEFAULT.convert(12, DistanceUnit.MILES), 0.00001)); - } - - @Test - 
public void testGeoBoundingBoxFilterNamed() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/geo_boundingbox-named.json"); - ParsedQuery parsedQuery = queryParser.parse(query); - assertThat(parsedQuery.namedFilters().containsKey("test"), equalTo(true)); - InMemoryGeoBoundingBoxQuery filter = (InMemoryGeoBoundingBoxQuery) parsedQuery.query(); - assertThat(filter.fieldName(), equalTo("location")); - assertThat(filter.topLeft().lat(), closeTo(40, 0.00001)); - assertThat(filter.topLeft().lon(), closeTo(-70, 0.00001)); - assertThat(filter.bottomRight().lat(), closeTo(30, 0.00001)); - assertThat(filter.bottomRight().lon(), closeTo(-80, 0.00001)); - } - - @Test - public void testGeoBoundingBoxFilter1() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/geo_boundingbox1.json"); - Query parsedQuery = queryParser.parse(query).query(); - InMemoryGeoBoundingBoxQuery filter = (InMemoryGeoBoundingBoxQuery) parsedQuery; - assertThat(filter.fieldName(), equalTo("location")); - assertThat(filter.topLeft().lat(), closeTo(40, 0.00001)); - assertThat(filter.topLeft().lon(), closeTo(-70, 0.00001)); - assertThat(filter.bottomRight().lat(), closeTo(30, 0.00001)); - assertThat(filter.bottomRight().lon(), closeTo(-80, 0.00001)); - } - - @Test - public void testGeoBoundingBoxFilter2() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/geo_boundingbox2.json"); - Query parsedQuery = queryParser.parse(query).query(); - InMemoryGeoBoundingBoxQuery filter = (InMemoryGeoBoundingBoxQuery) parsedQuery; - assertThat(filter.fieldName(), equalTo("location")); - assertThat(filter.topLeft().lat(), closeTo(40, 0.00001)); - assertThat(filter.topLeft().lon(), closeTo(-70, 0.00001)); - 
assertThat(filter.bottomRight().lat(), closeTo(30, 0.00001)); - assertThat(filter.bottomRight().lon(), closeTo(-80, 0.00001)); - } - - @Test - public void testGeoBoundingBoxFilter3() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/geo_boundingbox3.json"); - Query parsedQuery = queryParser.parse(query).query(); - InMemoryGeoBoundingBoxQuery filter = (InMemoryGeoBoundingBoxQuery) parsedQuery; - assertThat(filter.fieldName(), equalTo("location")); - assertThat(filter.topLeft().lat(), closeTo(40, 0.00001)); - assertThat(filter.topLeft().lon(), closeTo(-70, 0.00001)); - assertThat(filter.bottomRight().lat(), closeTo(30, 0.00001)); - assertThat(filter.bottomRight().lon(), closeTo(-80, 0.00001)); - } - - @Test - public void testGeoBoundingBoxFilter4() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/geo_boundingbox4.json"); - Query parsedQuery = queryParser.parse(query).query(); - InMemoryGeoBoundingBoxQuery filter = (InMemoryGeoBoundingBoxQuery) parsedQuery; - assertThat(filter.fieldName(), equalTo("location")); - assertThat(filter.topLeft().lat(), closeTo(40, 0.00001)); - assertThat(filter.topLeft().lon(), closeTo(-70, 0.00001)); - assertThat(filter.bottomRight().lat(), closeTo(30, 0.00001)); - assertThat(filter.bottomRight().lon(), closeTo(-80, 0.00001)); - } - - @Test - public void testGeoBoundingBoxFilter5() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/geo_boundingbox5.json"); - Query parsedQuery = queryParser.parse(query).query(); - InMemoryGeoBoundingBoxQuery filter = (InMemoryGeoBoundingBoxQuery) parsedQuery; - assertThat(filter.fieldName(), equalTo("location")); - assertThat(filter.topLeft().lat(), closeTo(40, 0.00001)); - assertThat(filter.topLeft().lon(), 
closeTo(-70, 0.00001)); - assertThat(filter.bottomRight().lat(), closeTo(30, 0.00001)); - assertThat(filter.bottomRight().lon(), closeTo(-80, 0.00001)); - } - - @Test - public void testGeoBoundingBoxFilter6() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/geo_boundingbox6.json"); - Query parsedQuery = queryParser.parse(query).query(); - InMemoryGeoBoundingBoxQuery filter = (InMemoryGeoBoundingBoxQuery) parsedQuery; - assertThat(filter.fieldName(), equalTo("location")); - assertThat(filter.topLeft().lat(), closeTo(40, 0.00001)); - assertThat(filter.topLeft().lon(), closeTo(-70, 0.00001)); - assertThat(filter.bottomRight().lat(), closeTo(30, 0.00001)); - assertThat(filter.bottomRight().lon(), closeTo(-80, 0.00001)); - } - - - @Test - public void testGeoPolygonNamedFilter() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/geo_polygon-named.json"); - ParsedQuery parsedQuery = queryParser.parse(query); - assertThat(parsedQuery.namedFilters().containsKey("test"), equalTo(true)); - GeoPolygonQuery filter = (GeoPolygonQuery) parsedQuery.query(); - assertThat(filter.fieldName(), equalTo("location")); - assertThat(filter.points().length, equalTo(4)); - assertThat(filter.points()[0].lat(), closeTo(40, 0.00001)); - assertThat(filter.points()[0].lon(), closeTo(-70, 0.00001)); - assertThat(filter.points()[1].lat(), closeTo(30, 0.00001)); - assertThat(filter.points()[1].lon(), closeTo(-80, 0.00001)); - assertThat(filter.points()[2].lat(), closeTo(20, 0.00001)); - assertThat(filter.points()[2].lon(), closeTo(-90, 0.00001)); - } - - - @Test - public void testGeoPolygonFilterParsingExceptions() throws IOException { - String[] brokenFiles = new String[]{ - "/org/elasticsearch/index/query/geo_polygon_exception_1.json", - "/org/elasticsearch/index/query/geo_polygon_exception_2.json", 
- "/org/elasticsearch/index/query/geo_polygon_exception_3.json", - "/org/elasticsearch/index/query/geo_polygon_exception_4.json", - "/org/elasticsearch/index/query/geo_polygon_exception_5.json" - }; - for (String brokenFile : brokenFiles) { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath(brokenFile); - try { - queryParser.parse(query).query(); - fail("parsing a broken geo_polygon filter didn't fail as expected while parsing: " + brokenFile); - } catch (ParsingException e) { - // success! - } - } - } - - - @Test - public void testGeoPolygonFilter1() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/geo_polygon1.json"); - Query parsedQuery = queryParser.parse(query).query(); - GeoPolygonQuery filter = (GeoPolygonQuery) parsedQuery; - assertThat(filter.fieldName(), equalTo("location")); - assertThat(filter.points().length, equalTo(4)); - assertThat(filter.points()[0].lat(), closeTo(40, 0.00001)); - assertThat(filter.points()[0].lon(), closeTo(-70, 0.00001)); - assertThat(filter.points()[1].lat(), closeTo(30, 0.00001)); - assertThat(filter.points()[1].lon(), closeTo(-80, 0.00001)); - assertThat(filter.points()[2].lat(), closeTo(20, 0.00001)); - assertThat(filter.points()[2].lon(), closeTo(-90, 0.00001)); - } - - @Test - public void testGeoPolygonFilter2() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/geo_polygon2.json"); - Query parsedQuery = queryParser.parse(query).query(); - GeoPolygonQuery filter = (GeoPolygonQuery) parsedQuery; - assertThat(filter.fieldName(), equalTo("location")); - assertThat(filter.points().length, equalTo(4)); - assertThat(filter.points()[0].lat(), closeTo(40, 0.00001)); - assertThat(filter.points()[0].lon(), closeTo(-70, 0.00001)); - assertThat(filter.points()[1].lat(), closeTo(30, 
0.00001)); - assertThat(filter.points()[1].lon(), closeTo(-80, 0.00001)); - assertThat(filter.points()[2].lat(), closeTo(20, 0.00001)); - assertThat(filter.points()[2].lon(), closeTo(-90, 0.00001)); - } - - @Test - public void testGeoPolygonFilter3() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/geo_polygon3.json"); - Query parsedQuery = queryParser.parse(query).query(); - GeoPolygonQuery filter = (GeoPolygonQuery) parsedQuery; - assertThat(filter.fieldName(), equalTo("location")); - assertThat(filter.points().length, equalTo(4)); - assertThat(filter.points()[0].lat(), closeTo(40, 0.00001)); - assertThat(filter.points()[0].lon(), closeTo(-70, 0.00001)); - assertThat(filter.points()[1].lat(), closeTo(30, 0.00001)); - assertThat(filter.points()[1].lon(), closeTo(-80, 0.00001)); - assertThat(filter.points()[2].lat(), closeTo(20, 0.00001)); - assertThat(filter.points()[2].lon(), closeTo(-90, 0.00001)); - } - - @Test - public void testGeoPolygonFilter4() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/geo_polygon4.json"); - Query parsedQuery = queryParser.parse(query).query(); - GeoPolygonQuery filter = (GeoPolygonQuery) parsedQuery; - assertThat(filter.fieldName(), equalTo("location")); - assertThat(filter.points().length, equalTo(4)); - assertThat(filter.points()[0].lat(), closeTo(40, 0.00001)); - assertThat(filter.points()[0].lon(), closeTo(-70, 0.00001)); - assertThat(filter.points()[1].lat(), closeTo(30, 0.00001)); - assertThat(filter.points()[1].lon(), closeTo(-80, 0.00001)); - assertThat(filter.points()[2].lat(), closeTo(20, 0.00001)); - assertThat(filter.points()[2].lon(), closeTo(-90, 0.00001)); - } - - @Test - public void testGeoShapeFilter() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = 
copyToStringFromClasspath("/org/elasticsearch/index/query/geoShape-filter.json"); - Query parsedQuery = queryParser.parse(query).query(); - ConstantScoreQuery constantScoreQuery = (ConstantScoreQuery) parsedQuery; - assertThat(constantScoreQuery.getQuery(), instanceOf(IntersectsPrefixTreeFilter.class)); - } - - @Test - public void testGeoShapeQuery() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/geoShape-query.json"); - Query parsedQuery = queryParser.parse(query).query(); - assertThat(parsedQuery, instanceOf(ConstantScoreQuery.class)); - ConstantScoreQuery csq = (ConstantScoreQuery) parsedQuery; - assertThat(csq.getQuery(), instanceOf(IntersectsPrefixTreeFilter.class)); - } - - @Test - public void testCommonTermsQuery1() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/commonTerms-query1.json"); - Query parsedQuery = queryParser.parse(query).query(); - assertThat(parsedQuery, instanceOf(ExtendedCommonTermsQuery.class)); - ExtendedCommonTermsQuery ectQuery = (ExtendedCommonTermsQuery) parsedQuery; - assertThat(ectQuery.getHighFreqMinimumNumberShouldMatchSpec(), nullValue()); - assertThat(ectQuery.getLowFreqMinimumNumberShouldMatchSpec(), equalTo("2")); - } - - @Test - public void testCommonTermsQuery2() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/commonTerms-query2.json"); - Query parsedQuery = queryParser.parse(query).query(); - assertThat(parsedQuery, instanceOf(ExtendedCommonTermsQuery.class)); - ExtendedCommonTermsQuery ectQuery = (ExtendedCommonTermsQuery) parsedQuery; - assertThat(ectQuery.getHighFreqMinimumNumberShouldMatchSpec(), equalTo("50%")); - assertThat(ectQuery.getLowFreqMinimumNumberShouldMatchSpec(), equalTo("5<20%")); - } - - @Test - 
public void testCommonTermsQuery3() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/commonTerms-query3.json"); - Query parsedQuery = queryParser.parse(query).query(); - assertThat(parsedQuery, instanceOf(ExtendedCommonTermsQuery.class)); - ExtendedCommonTermsQuery ectQuery = (ExtendedCommonTermsQuery) parsedQuery; - assertThat(ectQuery.getHighFreqMinimumNumberShouldMatchSpec(), nullValue()); - assertThat(ectQuery.getLowFreqMinimumNumberShouldMatchSpec(), equalTo("2")); - } - - @Test // see #11730 - public void testCommonTermsQuery4() throws IOException { - IndexQueryParserService queryParser = queryParser(); - Query parsedQuery = queryParser.parse(commonTermsQuery("field", "text").disableCoord(false)).query(); - assertThat(parsedQuery, instanceOf(ExtendedCommonTermsQuery.class)); - ExtendedCommonTermsQuery ectQuery = (ExtendedCommonTermsQuery) parsedQuery; - assertFalse(ectQuery.isCoordDisabled()); - parsedQuery = queryParser.parse(commonTermsQuery("field", "text").disableCoord(true)).query(); - assertThat(parsedQuery, instanceOf(ExtendedCommonTermsQuery.class)); - ectQuery = (ExtendedCommonTermsQuery) parsedQuery; - assertTrue(ectQuery.isCoordDisabled()); - } - - @Test(expected = ParsingException.class) - public void assureMalformedThrowsException() throws IOException { - IndexQueryParserService queryParser; - queryParser = queryParser(); - String query; - query = copyToStringFromClasspath("/org/elasticsearch/index/query/faulty-function-score-query.json"); - Query parsedQuery = queryParser.parse(query).query(); - } - - @Test - public void testFilterParsing() throws IOException { - IndexQueryParserService queryParser; - queryParser = queryParser(); - String query; - query = copyToStringFromClasspath("/org/elasticsearch/index/query/function-filter-score-query.json"); - Query parsedQuery = queryParser.parse(query).query(); - assertThat((double) 
(parsedQuery.getBoost()), Matchers.closeTo(3.0, 1.e-7)); - } - - @Test - public void testBadTypeMatchQuery() throws Exception { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/match-query-bad-type.json"); - ParsingException expectedException = null; - try { - queryParser.parse(query).query(); - } catch (ParsingException qpe) { - expectedException = qpe; - } - assertThat(expectedException, notNullValue()); - } - - @Test - public void testMultiMatchQuery() throws Exception { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/multiMatch-query-simple.json"); - Query parsedQuery = queryParser.parse(query).query(); - assertThat(parsedQuery, instanceOf(DisjunctionMaxQuery.class)); - } - - @Test - public void testBadTypeMultiMatchQuery() throws Exception { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/multiMatch-query-bad-type.json"); - ParsingException expectedException = null; - try { - queryParser.parse(query).query(); - } catch (ParsingException qpe) { - expectedException = qpe; - } - assertThat(expectedException, notNullValue()); - } - - @Test - public void testMultiMatchQueryWithFieldsAsString() throws Exception { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/multiMatch-query-fields-as-string.json"); - Query parsedQuery = queryParser.parse(query).query(); - assertThat(parsedQuery, instanceOf(BooleanQuery.class)); - } - - public void testCrossFieldMultiMatchQuery() throws IOException { - IndexQueryParserService queryParser = queryParser(); - Query parsedQuery = queryParser.parse(multiMatchQuery("banon", "name.first^2", "name.last^3", "foobar").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS)).query(); - try (Engine.Searcher searcher = 
indexService.shardSafe(0).acquireSearcher("test")) { - Query rewrittenQuery = searcher.searcher().rewrite(parsedQuery); - - BooleanQuery.Builder expected = new BooleanQuery.Builder(); - expected.add(new TermQuery(new Term("foobar", "banon")), Occur.SHOULD); - Query tq1 = new BoostQuery(new TermQuery(new Term("name.first", "banon")), 2); - Query tq2 = new BoostQuery(new TermQuery(new Term("name.last", "banon")), 3); - expected.add(new DisjunctionMaxQuery(Arrays.asList(tq1, tq2), 0f), Occur.SHOULD); - assertEquals(expected.build(), rewrittenQuery); - } - } - - @Test - public void testSimpleQueryString() throws Exception { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/simple-query-string.json"); - Query parsedQuery = queryParser.parse(query).query(); - assertThat(parsedQuery, instanceOf(BooleanQuery.class)); - } - - @Test - public void testSimpleQueryStringBoost() throws Exception { - IndexQueryParserService queryParser = queryParser(); - SimpleQueryStringBuilder simpleQueryStringBuilder = new SimpleQueryStringBuilder("test"); - simpleQueryStringBuilder.field("body", 5); - Query parsedQuery = queryParser.parse(simpleQueryStringBuilder.toString()).query(); - assertThat(parsedQuery, instanceOf(TermQuery.class)); - assertThat(parsedQuery.getBoost(), equalTo(5f)); - - simpleQueryStringBuilder = new SimpleQueryStringBuilder("test"); - simpleQueryStringBuilder.field("body", 5); - simpleQueryStringBuilder.boost(2); - parsedQuery = queryParser.parse(simpleQueryStringBuilder.toString()).query(); - assertThat(parsedQuery, instanceOf(TermQuery.class)); - assertThat(parsedQuery.getBoost(), equalTo(10f)); - } - - @Test - public void testMatchWithFuzzyTranspositions() throws Exception { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/match-with-fuzzy-transpositions.json"); - Query parsedQuery = 
queryParser.parse(query).query(); - assertThat(parsedQuery, instanceOf(FuzzyQuery.class)); - assertThat( ((FuzzyQuery) parsedQuery).getTranspositions(), equalTo(true)); - } - - @Test - public void testMatchWithoutFuzzyTranspositions() throws Exception { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/match-without-fuzzy-transpositions.json"); - Query parsedQuery = queryParser.parse(query).query(); - assertThat(parsedQuery, instanceOf(FuzzyQuery.class)); - assertThat( ((FuzzyQuery) parsedQuery).getTranspositions(), equalTo(false)); - } - - // https://github.com/elasticsearch/elasticsearch/issues/7240 - @Test - public void testEmptyBooleanQuery() throws Exception { - IndexQueryParserService queryParser = queryParser(); - String query = jsonBuilder().startObject().startObject("bool").endObject().endObject().string(); - Query parsedQuery = queryParser.parse(query).query(); - assertThat(parsedQuery, instanceOf(MatchAllDocsQuery.class)); - } - - @Test - public void testProperErrorMessageWhenTwoFunctionsDefinedInQueryBody() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = copyToStringFromClasspath("/org/elasticsearch/index/query/function-score-query-causing-NPE.json"); - try { - queryParser.parse(query).query(); - fail("FunctionScoreQueryParser should throw an exception here because two functions in body are not allowed."); - } catch (ParsingException e) { - assertThat(e.getDetailedMessage(), containsString("use [functions] array if you want to define several functions.")); - } - } - - @Test - public void testWeight1fStillProducesWeighFunction() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String queryString = jsonBuilder().startObject() - .startObject("function_score") - .startArray("functions") - .startObject() - .startObject("field_value_factor") - .field("field", "popularity") - .endObject() - .field("weight", 
1.0) - .endObject() - .endArray() - .endObject() - .endObject().string(); - IndexService indexService = createIndex("testidx", client().admin().indices().prepareCreate("testidx") - .addMapping("doc",jsonBuilder().startObject() - .startObject("properties") - .startObject("popularity").field("type", "float").endObject() - .endObject() - .endObject())); - SearchContext.setCurrent(createSearchContext(indexService)); - Query query = queryParser.parse(queryString).query(); - assertThat(query, instanceOf(FunctionScoreQuery.class)); - assertThat(((FunctionScoreQuery) query).getFunction(), instanceOf(WeightFactorFunction.class)); - SearchContext.removeCurrent(); - } - - @Test - public void testProperErrorMessagesForMisplacedWeightsAndFunctions() throws IOException { - IndexQueryParserService queryParser = queryParser(); - String query = jsonBuilder().startObject().startObject("function_score") - .startArray("functions") - .startObject().startObject("script_score").field("script", "3").endObject().endObject() - .endArray() - .field("weight", 2) - .endObject().endObject().string(); - try { - queryParser.parse(query).query(); - fail("Expect exception here because array of functions and one weight in body is not allowed."); - } catch (ParsingException e) { - assertThat(e.getDetailedMessage(), containsString("you can either define [functions] array or a single function, not both. already found [functions] array, now encountering [weight].")); - } - query = jsonBuilder().startObject().startObject("function_score") - .field("weight", 2) - .startArray("functions") - .startObject().endObject() - .endArray() - .endObject().endObject().string(); - try { - queryParser.parse(query).query(); - fail("Expect exception here because array of functions and one weight in body is not allowed."); - } catch (ParsingException e) { - assertThat(e.getDetailedMessage(), containsString("you can either define [functions] array or a single function, not both. 
already found [weight], now encountering [functions].")); - } - } - - /** - * helper to extract term from TermQuery. */ - private Term getTerm(Query query) { - while (query instanceof QueryWrapperFilter) { - query = ((QueryWrapperFilter) query).getQuery(); - } - TermQuery wrapped = (TermQuery) query; - return wrapped.getTerm(); - } - - public void testDefaultBooleanQueryMinShouldMatch() throws Exception { - IndexQueryParserService queryParser = queryParser(); - - // Queries have a minShouldMatch of 0 - BooleanQuery bq = (BooleanQuery) queryParser.parse(boolQuery().must(termQuery("foo", "bar"))).query(); - assertEquals(0, bq.getMinimumNumberShouldMatch()); - - bq = (BooleanQuery) queryParser.parse(boolQuery().should(termQuery("foo", "bar"))).query(); - assertEquals(0, bq.getMinimumNumberShouldMatch()); - - // Filters have a minShouldMatch of 0/1 - ConstantScoreQuery csq = (ConstantScoreQuery) queryParser.parse(constantScoreQuery(boolQuery().must(termQuery("foo", "bar")))).query(); - bq = (BooleanQuery) csq.getQuery(); - assertEquals(0, bq.getMinimumNumberShouldMatch()); - - csq = (ConstantScoreQuery) queryParser.parse(constantScoreQuery(boolQuery().should(termQuery("foo", "bar")))).query(); - bq = (BooleanQuery) csq.getQuery(); - assertEquals(1, bq.getMinimumNumberShouldMatch()); - } - - public void testTermsQueryFilter() throws Exception { - // TermsQuery is tricky in that it parses differently as a query or a filter - IndexQueryParserService queryParser = queryParser(); - Query q = queryParser.parse(termsQuery("foo", "bar")).query(); - assertThat(q, instanceOf(BooleanQuery.class)); - - ConstantScoreQuery csq = (ConstantScoreQuery) queryParser.parse(constantScoreQuery(termsQuery("foo", "bar"))).query(); - q = csq.getQuery(); - assertThat(q, instanceOf(TermsQuery.class)); - } - - @Test - public void testBlendedRewriteMethod() throws IOException { - IndexQueryParserService queryParser = queryParser(); - for (String rewrite : 
Arrays.asList("top_terms_blended_freqs_10", "topTermsBlendedFreqs10")) { - Query parsedQuery = queryParser.parse(prefixQuery("field", "val").rewrite(rewrite)).query(); - assertThat(parsedQuery, instanceOf(PrefixQuery.class)); - PrefixQuery prefixQuery = (PrefixQuery) parsedQuery; - assertThat(prefixQuery.getPrefix(), equalTo(new Term("field", "val"))); - assertThat(prefixQuery.getRewriteMethod(), instanceOf(MultiTermQuery.TopTermsBlendedFreqScoringRewrite.class)); - } - } - - @Test - public void testSimpleQueryStringNoFields() throws Exception { - IndexQueryParserService queryParser = queryParser(); - String queryText = randomAsciiOfLengthBetween(1, 10).toLowerCase(Locale.ROOT); - String query = "{\n" + - " \"simple_query_string\" : {\n" + - " \"query\" : \"" + queryText + "\"\n" + - " }\n" + - "}"; - Query parsedQuery = queryParser.parse(query).query(); - assertThat(parsedQuery, instanceOf(TermQuery.class)); - TermQuery termQuery = (TermQuery) parsedQuery; - assertThat(termQuery.getTerm(), equalTo(new Term(MetaData.ALL, queryText))); - } -} diff --git a/core/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java new file mode 100644 index 00000000000..5ae54d4078a --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java @@ -0,0 +1,330 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.query; + +import org.apache.lucene.index.Term; +import org.apache.lucene.search.*; +import org.elasticsearch.Version; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParser; +import org.junit.Test; + +import java.io.IOException; +import java.util.*; + +import static org.hamcrest.Matchers.*; + +public class SimpleQueryStringBuilderTests extends AbstractQueryTestCase { + + @Override + protected SimpleQueryStringBuilder doCreateTestQueryBuilder() { + SimpleQueryStringBuilder result = new SimpleQueryStringBuilder(randomAsciiOfLengthBetween(1, 10)); + if (randomBoolean()) { + result.analyzeWildcard(randomBoolean()); + } + if (randomBoolean()) { + result.lenient(randomBoolean()); + } + if (randomBoolean()) { + result.lowercaseExpandedTerms(randomBoolean()); + } + if (randomBoolean()) { + result.locale(randomLocale(getRandom())); + } + if (randomBoolean()) { + result.minimumShouldMatch(randomMinimumShouldMatch()); + } + if (randomBoolean()) { + result.analyzer(randomAnalyzer()); + } + if (randomBoolean()) { + result.defaultOperator(randomFrom(Operator.values())); + } + if (randomBoolean()) { + Set flagSet = new HashSet<>(); + int size = randomIntBetween(0, SimpleQueryStringFlag.values().length); + for (int i = 0; i < size; i++) { + flagSet.add(randomFrom(SimpleQueryStringFlag.values())); + } + if (flagSet.size() > 0) { + result.flags(flagSet.toArray(new SimpleQueryStringFlag[flagSet.size()])); + } + 
} + + int fieldCount = randomIntBetween(0, 10); + Map fields = new HashMap<>(); + for (int i = 0; i < fieldCount; i++) { + if (randomBoolean()) { + fields.put(randomAsciiOfLengthBetween(1, 10), AbstractQueryBuilder.DEFAULT_BOOST); + } else { + fields.put(randomBoolean() ? STRING_FIELD_NAME : randomAsciiOfLengthBetween(1, 10), 2.0f / randomIntBetween(1, 20)); + } + } + result.fields(fields); + + return result; + } + + @Test + public void testDefaults() { + SimpleQueryStringBuilder qb = new SimpleQueryStringBuilder("The quick brown fox."); + + assertEquals("Wrong default default boost.", AbstractQueryBuilder.DEFAULT_BOOST, qb.boost(), 0.001); + assertEquals("Wrong default default boost field.", AbstractQueryBuilder.DEFAULT_BOOST, SimpleQueryStringBuilder.DEFAULT_BOOST, + 0.001); + + assertEquals("Wrong default flags.", SimpleQueryStringFlag.ALL.value, qb.flags()); + assertEquals("Wrong default flags field.", SimpleQueryStringFlag.ALL.value(), SimpleQueryStringBuilder.DEFAULT_FLAGS); + + assertEquals("Wrong default default operator.", Operator.OR, qb.defaultOperator()); + assertEquals("Wrong default default operator field.", Operator.OR, SimpleQueryStringBuilder.DEFAULT_OPERATOR); + + assertEquals("Wrong default default locale.", Locale.ROOT, qb.locale()); + assertEquals("Wrong default default locale field.", Locale.ROOT, SimpleQueryStringBuilder.DEFAULT_LOCALE); + + assertEquals("Wrong default default analyze_wildcard.", false, qb.analyzeWildcard()); + assertEquals("Wrong default default analyze_wildcard field.", false, SimpleQueryStringBuilder.DEFAULT_ANALYZE_WILDCARD); + + assertEquals("Wrong default default lowercase_expanded_terms.", true, qb.lowercaseExpandedTerms()); + assertEquals("Wrong default default lowercase_expanded_terms field.", true, + SimpleQueryStringBuilder.DEFAULT_LOWERCASE_EXPANDED_TERMS); + + assertEquals("Wrong default default lenient.", false, qb.lenient()); + assertEquals("Wrong default default lenient field.", false, 
SimpleQueryStringBuilder.DEFAULT_LENIENT); + + assertEquals("Wrong default default locale.", Locale.ROOT, qb.locale()); + assertEquals("Wrong default default locale field.", Locale.ROOT, SimpleQueryStringBuilder.DEFAULT_LOCALE); + } + + @Test + public void testDefaultNullLocale() { + SimpleQueryStringBuilder qb = new SimpleQueryStringBuilder("The quick brown fox."); + qb.locale(null); + assertEquals("Setting locale to null should result in returning to default value.", SimpleQueryStringBuilder.DEFAULT_LOCALE, + qb.locale()); + } + + @Test + public void testDefaultNullComplainFlags() { + SimpleQueryStringBuilder qb = new SimpleQueryStringBuilder("The quick brown fox."); + qb.flags((SimpleQueryStringFlag[]) null); + assertEquals("Setting flags to null should result in returning to default value.", SimpleQueryStringBuilder.DEFAULT_FLAGS, + qb.flags()); + } + + @Test + public void testDefaultEmptyComplainFlags() { + SimpleQueryStringBuilder qb = new SimpleQueryStringBuilder("The quick brown fox."); + qb.flags(new SimpleQueryStringFlag[]{}); + assertEquals("Setting flags to empty should result in returning to default value.", SimpleQueryStringBuilder.DEFAULT_FLAGS, + qb.flags()); + } + + @Test + public void testDefaultNullComplainOp() { + SimpleQueryStringBuilder qb = new SimpleQueryStringBuilder("The quick brown fox."); + qb.defaultOperator(null); + assertEquals("Setting operator to null should result in returning to default value.", SimpleQueryStringBuilder.DEFAULT_OPERATOR, + qb.defaultOperator()); + } + + // Check operator handling, and default field handling. 
+ @Test + public void testDefaultOperatorHandling() throws IOException { + SimpleQueryStringBuilder qb = new SimpleQueryStringBuilder("The quick brown fox.").field(STRING_FIELD_NAME); + QueryShardContext shardContext = createShardContext(); + shardContext.setAllowUnmappedFields(true); // to avoid occasional cases + // in setup where we didn't + // add types but strict field + // resolution + BooleanQuery boolQuery = (BooleanQuery) qb.toQuery(shardContext); + assertThat(shouldClauses(boolQuery), is(4)); + + qb.defaultOperator(Operator.AND); + boolQuery = (BooleanQuery) qb.toQuery(shardContext); + assertThat(shouldClauses(boolQuery), is(0)); + + qb.defaultOperator(Operator.OR); + boolQuery = (BooleanQuery) qb.toQuery(shardContext); + assertThat(shouldClauses(boolQuery), is(4)); + } + + @Test + public void testIllegalConstructorArg() { + try { + new SimpleQueryStringBuilder(null); + fail("cannot be null"); + } catch (IllegalArgumentException e) { + // expected + } + } + + @Test(expected = IllegalArgumentException.class) + public void testFieldCannotBeNull() { + SimpleQueryStringBuilder qb = createTestQueryBuilder(); + qb.field(null); + } + + @Test(expected = IllegalArgumentException.class) + public void testFieldCannotBeNullAndWeighted() { + SimpleQueryStringBuilder qb = createTestQueryBuilder(); + qb.field(null, AbstractQueryBuilder.DEFAULT_BOOST); + } + + @Test(expected = IllegalArgumentException.class) + public void testFieldCannotBeEmpty() { + SimpleQueryStringBuilder qb = createTestQueryBuilder(); + qb.field(""); + } + + @Test(expected = IllegalArgumentException.class) + public void testFieldCannotBeEmptyAndWeighted() { + SimpleQueryStringBuilder qb = createTestQueryBuilder(); + qb.field("", AbstractQueryBuilder.DEFAULT_BOOST); + } + + /** + * The following should fail fast - never silently set the map containing + * fields and weights to null but refuse to accept null instead. 
+ * */ + @Test(expected = NullPointerException.class) + public void testFieldsCannotBeSetToNull() { + SimpleQueryStringBuilder qb = createTestQueryBuilder(); + qb.fields(null); + } + + @Test + public void testDefaultFieldParsing() throws IOException { + QueryParseContext context = createParseContext(); + String query = randomAsciiOfLengthBetween(1, 10).toLowerCase(Locale.ROOT); + String contentString = "{\n" + + " \"simple_query_string\" : {\n" + + " \"query\" : \"" + query + "\"" + + " }\n" + + "}"; + XContentParser parser = XContentFactory.xContent(contentString).createParser(contentString); + context.reset(parser); + SimpleQueryStringBuilder queryBuilder = new SimpleQueryStringParser().fromXContent(context); + assertThat(queryBuilder.value(), equalTo(query)); + assertThat(queryBuilder.fields(), notNullValue()); + assertThat(queryBuilder.fields().size(), equalTo(0)); + QueryShardContext shardContext = createShardContext(); + + // the remaining tests requires either a mapping that we register with types in base test setup + // no strict field resolution (version before V_1_4_0_Beta1) + if (getCurrentTypes().length > 0 || shardContext.indexQueryParserService().getIndexCreatedVersion().before(Version.V_1_4_0_Beta1)) { + Query luceneQuery = queryBuilder.toQuery(shardContext); + assertThat(luceneQuery, instanceOf(TermQuery.class)); + TermQuery termQuery = (TermQuery) luceneQuery; + assertThat(termQuery.getTerm(), equalTo(new Term(MetaData.ALL, query))); + } + } + + /* + * This assumes that Lucene query parsing is being checked already, adding + * checks only for our parsing extensions. + * + * Also this relies on {@link SimpleQueryStringTests} to test most of the + * actual functionality of query parsing. 
+ */ + @Override + protected void doAssertLuceneQuery(SimpleQueryStringBuilder queryBuilder, Query query, QueryShardContext context) throws IOException { + assertThat(query, notNullValue()); + + if ("".equals(queryBuilder.value())) { + assertTrue("Query should have been MatchNoDocsQuery but was " + query.getClass().getName(), query instanceof MatchNoDocsQuery); + } else if (queryBuilder.fields().size() > 1) { + assertTrue("Query should have been BooleanQuery but was " + query.getClass().getName(), query instanceof BooleanQuery); + + BooleanQuery boolQuery = (BooleanQuery) query; + if (queryBuilder.lowercaseExpandedTerms()) { + for (BooleanClause clause : boolQuery.clauses()) { + if (clause.getQuery() instanceof TermQuery) { + TermQuery inner = (TermQuery) clause.getQuery(); + assertThat(inner.getTerm().bytes().toString(), is(inner.getTerm().bytes().toString().toLowerCase(Locale.ROOT))); + } + } + } + + assertThat(boolQuery.clauses().size(), equalTo(queryBuilder.fields().size())); + Iterator fields = queryBuilder.fields().keySet().iterator(); + for (BooleanClause booleanClause : boolQuery) { + assertThat(booleanClause.getQuery(), instanceOf(TermQuery.class)); + TermQuery termQuery = (TermQuery) booleanClause.getQuery(); + assertThat(termQuery.getTerm().field(), equalTo(fields.next())); + assertThat(termQuery.getTerm().text().toLowerCase(Locale.ROOT), equalTo(queryBuilder.value().toLowerCase(Locale.ROOT))); + } + + if (queryBuilder.minimumShouldMatch() != null) { + assertThat(boolQuery.getMinimumNumberShouldMatch(), greaterThan(0)); + } + } else if (queryBuilder.fields().size() <= 1) { + assertTrue("Query should have been TermQuery but was " + query.getClass().getName(), query instanceof TermQuery); + + TermQuery termQuery = (TermQuery) query; + String field; + if (queryBuilder.fields().size() == 0) { + field = MetaData.ALL; + } else { + field = queryBuilder.fields().keySet().iterator().next(); + } + assertThat(termQuery.getTerm().field(), equalTo(field)); + 
assertThat(termQuery.getTerm().text().toLowerCase(Locale.ROOT), equalTo(queryBuilder.value().toLowerCase(Locale.ROOT))); + } else { + fail("Encountered lucene query type we do not have a validation implementation for in our " + SimpleQueryStringBuilderTests.class.getSimpleName()); + } + } + + @Override + protected void assertBoost(SimpleQueryStringBuilder queryBuilder, Query query) throws IOException { + //boost may get parsed from the random query, we then combine the main boost with that one coming from lucene + //instead of trying to reparse the query and guess what the boost should be, we delegate boost checks to specific boost tests below + } + + + private int shouldClauses(BooleanQuery query) { + int result = 0; + for (BooleanClause c : query.clauses()) { + if (c.getOccur() == BooleanClause.Occur.SHOULD) { + result++; + } + } + return result; + } + + @Test + public void testToQueryBoost() throws IOException { + assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); + QueryShardContext shardContext = createShardContext(); + SimpleQueryStringBuilder simpleQueryStringBuilder = new SimpleQueryStringBuilder("test"); + simpleQueryStringBuilder.field(STRING_FIELD_NAME, 5); + Query query = simpleQueryStringBuilder.toQuery(shardContext); + assertThat(query, instanceOf(TermQuery.class)); + assertThat(query.getBoost(), equalTo(5f)); + + simpleQueryStringBuilder = new SimpleQueryStringBuilder("test"); + simpleQueryStringBuilder.field(STRING_FIELD_NAME, 5); + simpleQueryStringBuilder.boost(2); + query = simpleQueryStringBuilder.toQuery(shardContext); + assertThat(query, instanceOf(TermQuery.class)); + assertThat(query.getBoost(), equalTo(10f)); + } +} diff --git a/core/src/test/java/org/elasticsearch/index/query/SpanContainingQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/SpanContainingQueryBuilderTests.java new file mode 100644 index 00000000000..ff5882a6fa9 --- /dev/null +++ 
b/core/src/test/java/org/elasticsearch/index/query/SpanContainingQueryBuilderTests.java @@ -0,0 +1,59 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.query; + +import org.apache.lucene.search.Query; +import org.apache.lucene.search.spans.SpanContainingQuery; +import org.junit.Test; + +import java.io.IOException; + +import static org.hamcrest.CoreMatchers.instanceOf; + +public class SpanContainingQueryBuilderTests extends AbstractQueryTestCase { + + @Override + protected SpanContainingQueryBuilder doCreateTestQueryBuilder() { + SpanTermQueryBuilder[] spanTermQueries = new SpanTermQueryBuilderTests().createSpanTermQueryBuilders(2); + return new SpanContainingQueryBuilder(spanTermQueries[0], spanTermQueries[1]); + } + + @Override + protected void doAssertLuceneQuery(SpanContainingQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException { + assertThat(query, instanceOf(SpanContainingQuery.class)); + } + + @Test + public void testIllegalArguments() { + try { + new SpanContainingQueryBuilder(null, SpanTermQueryBuilder.PROTOTYPE); + fail("cannot be null"); + } catch (IllegalArgumentException e) { + // expected + } + + try { + new 
SpanContainingQueryBuilder(SpanTermQueryBuilder.PROTOTYPE, null); + fail("cannot be null"); + } catch (IllegalArgumentException e) { + // expected + } + } +} diff --git a/core/src/test/java/org/elasticsearch/index/query/SpanFirstQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/SpanFirstQueryBuilderTests.java new file mode 100644 index 00000000000..325db416be1 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/query/SpanFirstQueryBuilderTests.java @@ -0,0 +1,86 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.query; + +import org.apache.lucene.search.Query; +import org.apache.lucene.search.spans.SpanFirstQuery; +import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.junit.Test; + +import java.io.IOException; + +import static org.elasticsearch.index.query.QueryBuilders.spanTermQuery; +import static org.hamcrest.CoreMatchers.instanceOf; + +public class SpanFirstQueryBuilderTests extends AbstractQueryTestCase { + + @Override + protected SpanFirstQueryBuilder doCreateTestQueryBuilder() { + SpanTermQueryBuilder[] spanTermQueries = new SpanTermQueryBuilderTests().createSpanTermQueryBuilders(1); + return new SpanFirstQueryBuilder(spanTermQueries[0], randomIntBetween(0, 1000)); + } + + @Override + protected void doAssertLuceneQuery(SpanFirstQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException { + assertThat(query, instanceOf(SpanFirstQuery.class)); + } + + /** + * test exception on missing `end` and `match` parameter in parser + */ + @Test + public void testParseEnd() throws IOException { + + { + XContentBuilder builder = XContentFactory.jsonBuilder(); + builder.startObject(); + builder.startObject(SpanFirstQueryBuilder.NAME); + builder.field("match"); + spanTermQuery("description", "jumped").toXContent(builder, null); + builder.endObject(); + builder.endObject(); + + try { + parseQuery(builder.string()); + fail("missing [end] parameter should raise exception"); + } catch (ParsingException e) { + assertTrue(e.getMessage().contains("spanFirst must have [end] set")); + } + } + + { + XContentBuilder builder = XContentFactory.jsonBuilder(); + builder.startObject(); + builder.startObject(SpanFirstQueryBuilder.NAME); + builder.field("end", 10); + builder.endObject(); + builder.endObject(); + + try { + parseQuery(builder.string()); + fail("missing [match] parameter should raise exception"); 
+ } catch (ParsingException e) { + assertTrue(e.getMessage().contains("spanFirst must have [match] span query clause")); + } + } + } +} diff --git a/core/src/test/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilderTests.java new file mode 100644 index 00000000000..7c9e50abf5a --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilderTests.java @@ -0,0 +1,79 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.query; + +import org.apache.lucene.search.MultiTermQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.spans.SpanMultiTermQueryWrapper; +import org.junit.Test; + +import java.io.IOException; + +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.instanceOf; + +public class SpanMultiTermQueryBuilderTests extends AbstractQueryTestCase { + + @Override + protected SpanMultiTermQueryBuilder doCreateTestQueryBuilder() { + MultiTermQueryBuilder multiTermQueryBuilder = RandomQueryBuilder.createMultiTermQuery(random()); + return new SpanMultiTermQueryBuilder(multiTermQueryBuilder); + } + + @Override + protected void doAssertLuceneQuery(SpanMultiTermQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException { + assertThat(query, instanceOf(SpanMultiTermQueryWrapper.class)); + SpanMultiTermQueryWrapper spanMultiTermQueryWrapper = (SpanMultiTermQueryWrapper) query; + Query multiTermQuery = queryBuilder.innerQuery().toQuery(context); + assertThat(multiTermQuery, instanceOf(MultiTermQuery.class)); + assertThat(spanMultiTermQueryWrapper.getWrappedQuery(), equalTo(new SpanMultiTermQueryWrapper<>((MultiTermQuery)multiTermQuery).getWrappedQuery())); + } + + @Test + public void testIllegalArgument() { + try { + new SpanMultiTermQueryBuilder(null); + fail("cannot be null"); + } catch (IllegalArgumentException e) { + // expected + } + } + + /** + * test checks that we throw an {@link UnsupportedOperationException} if the query wrapped + * by {@link SpanMultiTermQueryBuilder} does not generate a lucene {@link MultiTermQuery}. + * This is currently the case for {@link RangeQueryBuilder} when the target field is mapped + * to a date. 
+ */ + @Test + public void testUnsupportedInnerQueryType() throws IOException { + QueryShardContext context = createShardContext(); + // test makes only sense if we have at least one type registered with date field mapping + if (getCurrentTypes().length > 0 && context.fieldMapper(DATE_FIELD_NAME) != null) { + try { + RangeQueryBuilder query = new RangeQueryBuilder(DATE_FIELD_NAME); + new SpanMultiTermQueryBuilder(query).toQuery(createShardContext()); + fail("Exception expected, range query on date fields should not generate a lucene " + MultiTermQuery.class.getName()); + } catch (UnsupportedOperationException e) { + assert(e.getMessage().contains("unsupported inner query, should be " + MultiTermQuery.class.getName())); + } + } + } +} diff --git a/core/src/test/java/org/elasticsearch/index/query/SpanNearQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/SpanNearQueryBuilderTests.java new file mode 100644 index 00000000000..02dcddb6636 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/query/SpanNearQueryBuilderTests.java @@ -0,0 +1,77 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.query; + +import org.apache.lucene.search.Query; +import org.apache.lucene.search.spans.SpanNearQuery; +import org.apache.lucene.search.spans.SpanQuery; +import org.junit.Test; + +import java.io.IOException; +import java.util.Iterator; + +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.instanceOf; + +public class SpanNearQueryBuilderTests extends AbstractQueryTestCase { + + @Override + protected SpanNearQueryBuilder doCreateTestQueryBuilder() { + SpanTermQueryBuilder[] spanTermQueries = new SpanTermQueryBuilderTests().createSpanTermQueryBuilders(randomIntBetween(1, 6)); + SpanNearQueryBuilder queryBuilder = new SpanNearQueryBuilder(spanTermQueries[0], randomIntBetween(-10, 10)); + for (int i = 1; i < spanTermQueries.length; i++) { + queryBuilder.clause(spanTermQueries[i]); + } + queryBuilder.inOrder(randomBoolean()); + queryBuilder.collectPayloads(randomBoolean()); + return queryBuilder; + } + + @Override + protected void doAssertLuceneQuery(SpanNearQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException { + assertThat(query, instanceOf(SpanNearQuery.class)); + SpanNearQuery spanNearQuery = (SpanNearQuery) query; + assertThat(spanNearQuery.getSlop(), equalTo(queryBuilder.slop())); + assertThat(spanNearQuery.isInOrder(), equalTo(queryBuilder.inOrder())); + assertThat(spanNearQuery.getClauses().length, equalTo(queryBuilder.clauses().size())); + Iterator spanQueryBuilderIterator = queryBuilder.clauses().iterator(); + for (SpanQuery spanQuery : spanNearQuery.getClauses()) { + assertThat(spanQuery, equalTo(spanQueryBuilderIterator.next().toQuery(context))); + } + } + + @Test + public void testIllegalArguments() { + try { + new SpanNearQueryBuilder(null, 1); + fail("cannot be null"); + } catch (IllegalArgumentException e) { + // ecpected + } + + try { + SpanNearQueryBuilder spanNearQueryBuilder = new SpanNearQueryBuilder(SpanTermQueryBuilder.PROTOTYPE, 1); + 
spanNearQueryBuilder.clause(null); + fail("cannot be null"); + } catch (IllegalArgumentException e) { + // ecpected + } + } +} diff --git a/core/src/test/java/org/elasticsearch/index/query/SpanNotQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/SpanNotQueryBuilderTests.java new file mode 100644 index 00000000000..1b711f19dd3 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/query/SpanNotQueryBuilderTests.java @@ -0,0 +1,193 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.query; + +import org.apache.lucene.search.Query; +import org.apache.lucene.search.spans.SpanNotQuery; +import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.junit.Test; + +import java.io.IOException; + +import static org.elasticsearch.index.query.QueryBuilders.spanNearQuery; +import static org.elasticsearch.index.query.QueryBuilders.spanTermQuery; +import static org.hamcrest.Matchers.*; + +public class SpanNotQueryBuilderTests extends AbstractQueryTestCase { + + @Override + protected SpanNotQueryBuilder doCreateTestQueryBuilder() { + SpanTermQueryBuilder[] spanTermQueries = new SpanTermQueryBuilderTests().createSpanTermQueryBuilders(2); + SpanNotQueryBuilder queryBuilder = new SpanNotQueryBuilder(spanTermQueries[0], spanTermQueries[1]); + if (randomBoolean()) { + // also test negative values, they should implicitly be changed to 0 + queryBuilder.dist(randomIntBetween(-2, 10)); + } else { + if (randomBoolean()) { + queryBuilder.pre(randomIntBetween(-2, 10)); + } + if (randomBoolean()) { + queryBuilder.post(randomIntBetween(-2, 10)); + } + } + return queryBuilder; + } + + @Override + protected void doAssertLuceneQuery(SpanNotQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException { + assertThat(query, instanceOf(SpanNotQuery.class)); + SpanNotQuery spanNotQuery = (SpanNotQuery) query; + assertThat(spanNotQuery.getExclude(), equalTo(queryBuilder.excludeQuery().toQuery(context))); + assertThat(spanNotQuery.getInclude(), equalTo(queryBuilder.includeQuery().toQuery(context))); + } + + @Test + public void testIllegalArgument() { + try { + new SpanNotQueryBuilder(null, SpanTermQueryBuilder.PROTOTYPE); + fail("cannot be null"); + } catch (IllegalArgumentException e) { + // expected + } + try { + new SpanNotQueryBuilder(SpanTermQueryBuilder.PROTOTYPE, null); + fail("cannot be 
null"); + } catch (IllegalArgumentException e) { + // expected + } + } + + @Test + public void testDist() { + SpanNotQueryBuilder builder = new SpanNotQueryBuilder(new SpanTermQueryBuilder("name1", "value1"), new SpanTermQueryBuilder("name2", "value2")); + assertThat(builder.pre(), equalTo(0)); + assertThat(builder.post(), equalTo(0)); + builder.dist(-4); + assertThat(builder.pre(), equalTo(0)); + assertThat(builder.post(), equalTo(0)); + builder.dist(4); + assertThat(builder.pre(), equalTo(4)); + assertThat(builder.post(), equalTo(4)); + } + + @Test + public void testPrePost() { + SpanNotQueryBuilder builder = new SpanNotQueryBuilder(new SpanTermQueryBuilder("name1", "value1"), new SpanTermQueryBuilder("name2", "value2")); + assertThat(builder.pre(), equalTo(0)); + assertThat(builder.post(), equalTo(0)); + builder.pre(-4).post(-4); + assertThat(builder.pre(), equalTo(0)); + assertThat(builder.post(), equalTo(0)); + builder.pre(1).post(2); + assertThat(builder.pre(), equalTo(1)); + assertThat(builder.post(), equalTo(2)); + } + + /** + * test correct parsing of `dist` parameter, this should create builder with pre/post set to same value + */ + @Test + public void testParseDist() throws IOException { + XContentBuilder builder = XContentFactory.jsonBuilder(); + builder.startObject(); + builder.startObject(SpanNotQueryBuilder.NAME); + builder.field("exclude"); + spanTermQuery("description", "jumped").toXContent(builder, null); + builder.field("include"); + spanNearQuery(QueryBuilders.spanTermQuery("description", "quick"), 1) + .clause(QueryBuilders.spanTermQuery("description", "fox")).toXContent(builder, null); + builder.field("dist", 3); + builder.endObject(); + builder.endObject(); + SpanNotQueryBuilder query = (SpanNotQueryBuilder)parseQuery(builder.string()); + assertThat(query.pre(), equalTo(3)); + assertThat(query.post(), equalTo(3)); + assertNotNull(query.includeQuery()); + assertNotNull(query.excludeQuery()); + } + + /** + * test exceptions for three types of 
broken json, missing include / exclude and both dist and pre/post specified + */ + @Test + public void testParserExceptions() throws IOException { + + { + XContentBuilder builder = XContentFactory.jsonBuilder(); + builder.startObject(); + builder.startObject(SpanNotQueryBuilder.NAME); + builder.field("exclude"); + spanTermQuery("description", "jumped").toXContent(builder, null); + builder.field("dist", 2); + builder.endObject(); + builder.endObject(); + + try { + parseQuery(builder.string()); + fail("ParsingException should have been caught"); + } catch (ParsingException e) { + assertThat("ParsingException should have been caught", e.getDetailedMessage(), containsString("spanNot must have [include]")); + } + } + + { + XContentBuilder builder = XContentFactory.jsonBuilder(); + builder.startObject(); + builder.startObject(SpanNotQueryBuilder.NAME); + builder.field("include"); + spanNearQuery(QueryBuilders.spanTermQuery("description", "quick"), 1) + .clause(QueryBuilders.spanTermQuery("description", "fox")).toXContent(builder, null); + builder.field("dist", 2); + builder.endObject(); + builder.endObject(); + + try { + parseQuery(builder.string()); + fail("ParsingException should have been caught"); + } catch (ParsingException e) { + assertThat("ParsingException should have been caught", e.getDetailedMessage(), containsString("spanNot must have [exclude]")); + } + } + + { + XContentBuilder builder = XContentFactory.jsonBuilder(); + builder.startObject(); + builder.startObject(SpanNotQueryBuilder.NAME); + builder.field("include"); + spanNearQuery(QueryBuilders.spanTermQuery("description", "quick"), 1) + .clause(QueryBuilders.spanTermQuery("description", "fox")).toXContent(builder, null); + builder.field("exclude"); + spanTermQuery("description", "jumped").toXContent(builder, null); + builder.field("dist", 2); + builder.field("pre", 2); + builder.endObject(); + builder.endObject(); + + try { + parseQuery(builder.string()); + fail("ParsingException should have been 
caught"); + } catch (ParsingException e) { + assertThat("ParsingException should have been caught", e.getDetailedMessage(), containsString("spanNot can either use [dist] or [pre] & [post] (or none)")); + } + } + } +} diff --git a/core/src/test/java/org/elasticsearch/index/query/SpanOrQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/SpanOrQueryBuilderTests.java new file mode 100644 index 00000000000..eaa70354bad --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/query/SpanOrQueryBuilderTests.java @@ -0,0 +1,73 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.query; + +import org.apache.lucene.search.Query; +import org.apache.lucene.search.spans.SpanOrQuery; +import org.apache.lucene.search.spans.SpanQuery; +import org.junit.Test; + +import java.io.IOException; +import java.util.Iterator; + +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.instanceOf; + +public class SpanOrQueryBuilderTests extends AbstractQueryTestCase { + + @Override + protected SpanOrQueryBuilder doCreateTestQueryBuilder() { + SpanTermQueryBuilder[] spanTermQueries = new SpanTermQueryBuilderTests().createSpanTermQueryBuilders(randomIntBetween(1, 6)); + SpanOrQueryBuilder queryBuilder = new SpanOrQueryBuilder(spanTermQueries[0]); + for (int i = 1; i < spanTermQueries.length; i++) { + queryBuilder.clause(spanTermQueries[i]); + } + return queryBuilder; + } + + @Override + protected void doAssertLuceneQuery(SpanOrQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException { + assertThat(query, instanceOf(SpanOrQuery.class)); + SpanOrQuery spanOrQuery = (SpanOrQuery) query; + assertThat(spanOrQuery.getClauses().length, equalTo(queryBuilder.clauses().size())); + Iterator spanQueryBuilderIterator = queryBuilder.clauses().iterator(); + for (SpanQuery spanQuery : spanOrQuery.getClauses()) { + assertThat(spanQuery, equalTo(spanQueryBuilderIterator.next().toQuery(context))); + } + } + + @Test + public void testIllegalArguments() { + try { + new SpanOrQueryBuilder(null); + fail("cannot be null"); + } catch (IllegalArgumentException e) { + // expected + } + + try { + SpanOrQueryBuilder spanOrBuilder = new SpanOrQueryBuilder(SpanTermQueryBuilder.PROTOTYPE); + spanOrBuilder.clause(null); + fail("cannot be null"); + } catch (IllegalArgumentException e) { + // expected + } + } +} diff --git a/core/src/test/java/org/elasticsearch/index/query/SpanTermQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/SpanTermQueryBuilderTests.java new file 
mode 100644 index 00000000000..a51efc6f985 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/query/SpanTermQueryBuilderTests.java @@ -0,0 +1,75 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.query; + +import org.apache.lucene.search.Query; +import org.apache.lucene.search.spans.SpanTermQuery; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.lucene.BytesRefs; +import org.elasticsearch.index.mapper.MappedFieldType; + +import java.io.IOException; + +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.instanceOf; + +public class SpanTermQueryBuilderTests extends AbstractTermQueryTestCase { + + @Override + protected SpanTermQueryBuilder createQueryBuilder(String fieldName, Object value) { + return new SpanTermQueryBuilder(fieldName, value); + } + + @Override + protected void doAssertLuceneQuery(SpanTermQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException { + assertThat(query, instanceOf(SpanTermQuery.class)); + SpanTermQuery spanTermQuery = (SpanTermQuery) query; + assertThat(spanTermQuery.getTerm().field(), equalTo(queryBuilder.fieldName())); + MappedFieldType mapper = 
context.fieldMapper(queryBuilder.fieldName()); + if (mapper != null) { + BytesRef bytesRef = mapper.indexedValueForSearch(queryBuilder.value()); + assertThat(spanTermQuery.getTerm().bytes(), equalTo(bytesRef)); + } else { + assertThat(spanTermQuery.getTerm().bytes(), equalTo(BytesRefs.toBytesRef(queryBuilder.value()))); + } + } + + /** + * @param amount the number of clauses that will be returned + * @return an array of random {@link SpanTermQueryBuilder} with same field name + */ + public SpanTermQueryBuilder[] createSpanTermQueryBuilders(int amount) { + SpanTermQueryBuilder[] clauses = new SpanTermQueryBuilder[amount]; + SpanTermQueryBuilder first = createTestQueryBuilder(); + clauses[0] = first; + for (int i = 1; i < amount; i++) { + // we need same field name in all clauses, so we only randomize value + SpanTermQueryBuilder spanTermQuery = new SpanTermQueryBuilder(first.fieldName(), getRandomValueForFieldName(first.fieldName())); + if (randomBoolean()) { + spanTermQuery.boost(2.0f / randomIntBetween(1, 20)); + } + if (randomBoolean()) { + spanTermQuery.queryName(randomAsciiOfLengthBetween(1, 10)); + } + clauses[i] = spanTermQuery; + } + return clauses; + } +} diff --git a/core/src/test/java/org/elasticsearch/index/query/SpanWithinQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/SpanWithinQueryBuilderTests.java new file mode 100644 index 00000000000..87b2380f624 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/query/SpanWithinQueryBuilderTests.java @@ -0,0 +1,59 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.query; + +import org.apache.lucene.search.Query; +import org.apache.lucene.search.spans.SpanWithinQuery; +import org.junit.Test; + +import java.io.IOException; + +import static org.hamcrest.CoreMatchers.instanceOf; + +public class SpanWithinQueryBuilderTests extends AbstractQueryTestCase { + + @Override + protected SpanWithinQueryBuilder doCreateTestQueryBuilder() { + SpanTermQueryBuilder[] spanTermQueries = new SpanTermQueryBuilderTests().createSpanTermQueryBuilders(2); + return new SpanWithinQueryBuilder(spanTermQueries[0], spanTermQueries[1]); + } + + @Override + protected void doAssertLuceneQuery(SpanWithinQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException { + assertThat(query, instanceOf(SpanWithinQuery.class)); + } + + @Test + public void testIllegalArguments() { + try { + new SpanWithinQueryBuilder(null, SpanTermQueryBuilder.PROTOTYPE); + fail("cannot be null"); + } catch (IllegalArgumentException e) { + // expected + } + + try { + new SpanWithinQueryBuilder(SpanTermQueryBuilder.PROTOTYPE, null); + fail("cannot be null"); + } catch (IllegalArgumentException e) { + // expected + } + } +} diff --git a/core/src/test/java/org/elasticsearch/index/query/TemplateQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/TemplateQueryBuilderTests.java index 647ac44c673..3d89633de48 100644 --- a/core/src/test/java/org/elasticsearch/index/query/TemplateQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/TemplateQueryBuilderTests.java 
@@ -16,23 +16,62 @@ * specific language governing permissions and limitations * under the License. */ + package org.elasticsearch.index.query; +import org.apache.lucene.search.Query; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.script.ScriptService.ScriptType; import org.elasticsearch.script.Template; -import org.elasticsearch.test.ESTestCase; +import org.junit.BeforeClass; import org.junit.Test; import java.io.IOException; import java.util.HashMap; import java.util.Map; -/** - * Test building and serialising a template search request. - * */ -public class TemplateQueryBuilderTests extends ESTestCase { +public class TemplateQueryBuilderTests extends AbstractQueryTestCase { + + /** + * The query type all template tests will be based on. + */ + private static QueryBuilder templateBase; + + @BeforeClass + public static void setupClass() { + templateBase = RandomQueryBuilder.createQuery(getRandom()); + } + + @Override + protected boolean supportsBoostAndQueryName() { + return false; + } + + @Override + protected TemplateQueryBuilder doCreateTestQueryBuilder() { + return new TemplateQueryBuilder(new Template(templateBase.toString())); + } + + @Override + protected void doAssertLuceneQuery(TemplateQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException { + assertEquals(templateBase.toQuery(context), query); + } + + @Test + public void testIllegalArgument() { + try { + new TemplateQueryBuilder(null); + fail("cannot be null"); + } catch (IllegalArgumentException e) { + // expected + } + } + + @Override + protected void assertBoost(TemplateQueryBuilder queryBuilder, Query query) throws IOException { + //no-op boost is checked already above as part of doAssertLuceneQuery as we rely on lucene equals impl + } @Test public void testJSONGeneration() throws IOException { diff --git 
a/core/src/test/java/org/elasticsearch/index/query/TemplateQueryParserTests.java b/core/src/test/java/org/elasticsearch/index/query/TemplateQueryParserTests.java index 6527e112c9c..40a002db881 100644 --- a/core/src/test/java/org/elasticsearch/index/query/TemplateQueryParserTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/TemplateQueryParserTests.java @@ -21,6 +21,7 @@ package org.elasticsearch.index.query; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; import org.elasticsearch.Version; +import org.elasticsearch.client.Client; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.ParsingException; @@ -55,6 +56,7 @@ import org.junit.Before; import org.junit.Test; import java.io.IOException; +import java.lang.reflect.Proxy; /** * Test parsing and executing a template request. @@ -63,7 +65,7 @@ import java.io.IOException; public class TemplateQueryParserTests extends ESTestCase { private Injector injector; - private QueryParseContext context; + private QueryShardContext context; @Before public void setup() throws IOException { @@ -73,7 +75,11 @@ public class TemplateQueryParserTests extends ESTestCase { .put("name", getClass().getName()) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .build(); - + final Client proxy = (Client) Proxy.newProxyInstance( + Client.class.getClassLoader(), + new Class[]{Client.class}, (proxy1, method, args) -> { + throw new UnsupportedOperationException("client is just a dummy"); + }); Index index = new Index("test"); injector = new ModulesBuilder().add( new EnvironmentModule(new Environment(settings)), @@ -95,6 +101,7 @@ public class TemplateQueryParserTests extends ESTestCase { new AbstractModule() { @Override protected void configure() { + bind(Client.class).toInstance(proxy); // not needed here Multibinder.newSetBinder(binder(), ScoreFunctionParser.class); 
bind(ClusterService.class).toProvider(Providers.of((ClusterService) null)); bind(CircuitBreakerService.class).to(NoneCircuitBreakerService.class); @@ -103,7 +110,7 @@ public class TemplateQueryParserTests extends ESTestCase { ).createInjector(); IndexQueryParserService queryParserService = injector.getInstance(IndexQueryParserService.class); - context = new QueryParseContext(index, queryParserService); + context = new QueryShardContext(index, queryParserService); } @Override @@ -122,7 +129,7 @@ public class TemplateQueryParserTests extends ESTestCase { templateSourceParser.nextToken(); TemplateQueryParser parser = injector.getInstance(TemplateQueryParser.class); - Query query = parser.parse(context); + Query query = parser.fromXContent(context.parseContext()).toQuery(context); assertTrue("Parsing template query failed.", query instanceof MatchAllDocsQuery); } @@ -134,7 +141,7 @@ public class TemplateQueryParserTests extends ESTestCase { context.reset(templateSourceParser); TemplateQueryParser parser = injector.getInstance(TemplateQueryParser.class); - Query query = parser.parse(context); + Query query = parser.fromXContent(context.parseContext()).toQuery(context); assertTrue("Parsing template query failed.", query instanceof MatchAllDocsQuery); } @@ -152,7 +159,7 @@ public class TemplateQueryParserTests extends ESTestCase { context.reset(templateSourceParser); TemplateQueryParser parser = injector.getInstance(TemplateQueryParser.class); - parser.parse(context); + parser.fromXContent(context.parseContext()).toQuery(context); } @Test @@ -164,7 +171,7 @@ public class TemplateQueryParserTests extends ESTestCase { templateSourceParser.nextToken(); TemplateQueryParser parser = injector.getInstance(TemplateQueryParser.class); - Query query = parser.parse(context); + Query query = parser.fromXContent(context.parseContext()).toQuery(context); assertTrue("Parsing template query failed.", query instanceof MatchAllDocsQuery); } } diff --git 
a/core/src/test/java/org/elasticsearch/index/query/TermQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/TermQueryBuilderTests.java new file mode 100644 index 00000000000..f79e249ac96 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/query/TermQueryBuilderTests.java @@ -0,0 +1,68 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.query; + +import org.apache.lucene.search.Query; +import org.apache.lucene.search.TermQuery; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.lucene.BytesRefs; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.junit.Test; + +import java.io.IOException; + +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.instanceOf; + +public class TermQueryBuilderTests extends AbstractTermQueryTestCase { + + /** + * @return a TermQuery with random field name and value, optional random boost and queryname + */ + @Override + protected TermQueryBuilder createQueryBuilder(String fieldName, Object value) { + return new TermQueryBuilder(fieldName, value); + } + + @Override + protected void doAssertLuceneQuery(TermQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException { + assertThat(query, instanceOf(TermQuery.class)); + TermQuery termQuery = (TermQuery) query; + assertThat(termQuery.getTerm().field(), equalTo(queryBuilder.fieldName())); + MappedFieldType mapper = context.fieldMapper(queryBuilder.fieldName()); + if (mapper != null) { + BytesRef bytesRef = mapper.indexedValueForSearch(queryBuilder.value()); + assertThat(termQuery.getTerm().bytes(), equalTo(bytesRef)); + } else { + assertThat(termQuery.getTerm().bytes(), equalTo(BytesRefs.toBytesRef(queryBuilder.value()))); + } + } + + @Test(expected = ParsingException.class) + public void testTermArray() throws IOException { + String queryAsString = "{\n" + + " \"term\": {\n" + + " \"age\": [34, 35]\n" + + " }\n" + + "}"; + parseQuery(queryAsString); + } +} diff --git a/core/src/test/java/org/elasticsearch/index/query/TermsQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/TermsQueryBuilderTests.java new file mode 100644 index 00000000000..b810d6be2b8 --- /dev/null +++ 
b/core/src/test/java/org/elasticsearch/index/query/TermsQueryBuilderTests.java @@ -0,0 +1,305 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.query; + +import com.carrotsearch.randomizedtesting.generators.RandomPicks; + +import org.apache.lucene.index.Term; +import org.apache.lucene.search.BooleanClause; +import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.TermQuery; +import org.apache.lucene.util.CollectionUtil; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.get.GetRequest; +import org.elasticsearch.action.get.GetResponse; +import org.elasticsearch.common.ParseFieldMatcher; +import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.index.get.GetResult; +import org.elasticsearch.indices.cache.query.terms.TermsLookup; +import org.hamcrest.Matchers; +import org.junit.Before; +import org.junit.Test; + +import java.io.IOException; +import java.util.*; + +import static org.hamcrest.Matchers.*; + +public 
class TermsQueryBuilderTests extends AbstractQueryTestCase { + + private List randomTerms; + private String termsPath; + + @Before + public void randomTerms() { + List randomTerms = new ArrayList<>(); + String[] strings = generateRandomStringArray(10, 10, false, true); + for (String string : strings) { + randomTerms.add(string); + if (rarely()) { + randomTerms.add(null); + } + } + this.randomTerms = randomTerms; + termsPath = randomAsciiOfLength(10).replace('.', '_'); + } + + @Override + protected TermsQueryBuilder doCreateTestQueryBuilder() { + TermsQueryBuilder query; + // terms query or lookup query + if (randomBoolean()) { + // make between 0 and 5 different values of the same type + String fieldName = getRandomFieldName(); + Object[] values = new Object[randomInt(5)]; + for (int i = 0; i < values.length; i++) { + values[i] = getRandomValueForFieldName(fieldName); + } + query = new TermsQueryBuilder(fieldName, values); + } else { + // right now the mock service returns us a list of strings + query = new TermsQueryBuilder(randomBoolean() ? randomAsciiOfLengthBetween(1,10) : STRING_FIELD_NAME, randomTermsLookup()); + } + return query; + } + + private TermsLookup randomTermsLookup() { + return new TermsLookup(randomBoolean() ? randomAsciiOfLength(10) : null, randomAsciiOfLength(10), randomAsciiOfLength(10), + termsPath).routing(randomBoolean() ? 
randomAsciiOfLength(10) : null); + } + + @Override + protected void doAssertLuceneQuery(TermsQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException { + assertThat(query, instanceOf(BooleanQuery.class)); + BooleanQuery booleanQuery = (BooleanQuery) query; + + // we only do the check below for string fields (otherwise we'd have to decode the values) + if (queryBuilder.fieldName().equals(INT_FIELD_NAME) || queryBuilder.fieldName().equals(DOUBLE_FIELD_NAME) + || queryBuilder.fieldName().equals(BOOLEAN_FIELD_NAME) || queryBuilder.fieldName().equals(DATE_FIELD_NAME)) { + return; + } + + // expected returned terms depending on whether we have a terms query or a terms lookup query + List terms; + if (queryBuilder.termsLookup() != null) { + terms = randomTerms; + } else { + terms = queryBuilder.values(); + } + + // compare whether we have the expected list of terms returned + final List booleanTerms = new ArrayList<>(); + for (BooleanClause booleanClause : booleanQuery) { + assertThat(booleanClause.getOccur(), equalTo(BooleanClause.Occur.SHOULD)); + assertThat(booleanClause.getQuery(), instanceOf(TermQuery.class)); + Term term = ((TermQuery) booleanClause.getQuery()).getTerm(); + booleanTerms.add(term); + } + CollectionUtil.timSort(booleanTerms); + List expectedTerms = new ArrayList<>(); + for (Object term : terms) { + if (term != null) { // terms lookup filters this out + expectedTerms.add(new Term(queryBuilder.fieldName(), term.toString())); + } + } + CollectionUtil.timSort(expectedTerms); + assertEquals(expectedTerms + " vs. " + booleanTerms, expectedTerms.size(), booleanTerms.size()); + assertEquals(expectedTerms + " vs. 
" + booleanTerms, expectedTerms, booleanTerms); + } + + @Test(expected=IllegalArgumentException.class) + public void testEmtpyFieldName() { + if (randomBoolean()) { + new TermsQueryBuilder(null, "term"); + } else { + new TermsQueryBuilder("", "term"); + } + } + + @Test(expected=IllegalArgumentException.class) + public void testEmtpyTermsLookup() { + new TermsQueryBuilder("field", (TermsLookup) null); + } + + @Test + public void testNullValues() { + try { + switch (randomInt(6)) { + case 0: + new TermsQueryBuilder("field", (String[]) null); + break; + case 1: + new TermsQueryBuilder("field", (int[]) null); + break; + case 2: + new TermsQueryBuilder("field", (long[]) null); + break; + case 3: + new TermsQueryBuilder("field", (float[]) null); + break; + case 4: + new TermsQueryBuilder("field", (double[]) null); + break; + case 5: + new TermsQueryBuilder("field", (Object[]) null); + break; + default: + new TermsQueryBuilder("field", (Iterable) null); + break; + } + fail("should have failed with IllegalArgumentException"); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), Matchers.containsString("No value specified for terms query")); + } + } + + @Test(expected=IllegalArgumentException.class) + public void testBothValuesAndLookupSet() throws IOException { + String query = "{\n" + + " \"terms\": {\n" + + " \"field\": [\n" + + " \"blue\",\n" + + " \"pill\"\n" + + " ],\n" + + " \"field_lookup\": {\n" + + " \"index\": \"pills\",\n" + + " \"type\": \"red\",\n" + + " \"id\": \"3\",\n" + + " \"path\": \"white rabbit\"\n" + + " }\n" + + " }\n" + + "}"; + QueryBuilder termsQueryBuilder = parseQuery(query); + } + + public void testDeprecatedXContent() throws IOException { + String query = "{\n" + + " \"terms\": {\n" + + " \"field\": [\n" + + " \"blue\",\n" + + " \"pill\"\n" + + " ],\n" + + " \"disable_coord\": true\n" + + " }\n" + + "}"; + try { + parseQuery(query); + fail("disable_coord is deprecated"); + } catch (IllegalArgumentException ex) { + 
assertEquals("Deprecated field [disable_coord] used, replaced by [Use [bool] query instead]", ex.getMessage()); + } + + TermsQueryBuilder queryBuilder = (TermsQueryBuilder) parseQuery(query, ParseFieldMatcher.EMPTY); + TermsQueryBuilder copy = assertSerialization(queryBuilder); + assertTrue(queryBuilder.disableCoord()); + assertTrue(copy.disableCoord()); + Query luceneQuery = queryBuilder.toQuery(createShardContext()); + assertThat(luceneQuery, instanceOf(BooleanQuery.class)); + BooleanQuery booleanQuery = (BooleanQuery) luceneQuery; + assertThat(booleanQuery.isCoordDisabled(), equalTo(true)); + + String randomMinShouldMatch = RandomPicks.randomFrom(random(), Arrays.asList("min_match", "min_should_match", "minimum_should_match")); + query = "{\n" + + " \"terms\": {\n" + + " \"field\": [\n" + + " \"value1\",\n" + + " \"value2\",\n" + + " \"value3\",\n" + + " \"value4\"\n" + + " ],\n" + + " \"" + randomMinShouldMatch +"\": \"25%\"\n" + + " }\n" + + "}"; + try { + parseQuery(query); + fail(randomMinShouldMatch + " is deprecated"); + } catch (IllegalArgumentException ex) { + assertEquals("Deprecated field [" + randomMinShouldMatch + "] used, replaced by [Use [bool] query instead]", ex.getMessage()); + } + queryBuilder = (TermsQueryBuilder) parseQuery(query, ParseFieldMatcher.EMPTY); + copy = assertSerialization(queryBuilder); + assertEquals("25%", queryBuilder.minimumShouldMatch()); + assertEquals("25%", copy.minimumShouldMatch()); + luceneQuery = queryBuilder.toQuery(createShardContext()); + assertThat(luceneQuery, instanceOf(BooleanQuery.class)); + booleanQuery = (BooleanQuery) luceneQuery; + assertThat(booleanQuery.getMinimumNumberShouldMatch(), equalTo(1)); + } + + @Override + public GetResponse executeGet(GetRequest getRequest) { + String json; + try { + XContentBuilder builder = XContentFactory.jsonBuilder().prettyPrint(); + builder.startObject(); + builder.array(termsPath, randomTerms.toArray(new Object[randomTerms.size()])); + builder.endObject(); + json = 
builder.string(); + } catch (IOException ex) { + throw new ElasticsearchException("boom", ex); + } + return new GetResponse(new GetResult(getRequest.index(), getRequest.type(), getRequest.id(), 0, true, new BytesArray(json), null)); + } + + public void testNumeric() throws IOException { + { + TermsQueryBuilder builder = new TermsQueryBuilder("foo", new int[]{1, 3, 4}); + TermsQueryBuilder copy = assertSerialization(builder); + List values = copy.values(); + assertEquals(Arrays.asList(1, 3, 4), values); + } + { + TermsQueryBuilder builder = new TermsQueryBuilder("foo", new double[]{1, 3, 4}); + TermsQueryBuilder copy = assertSerialization(builder); + List values = copy.values(); + assertEquals(Arrays.asList(1d, 3d, 4d), values); + } + { + TermsQueryBuilder builder = new TermsQueryBuilder("foo", new float[]{1, 3, 4}); + TermsQueryBuilder copy = assertSerialization(builder); + List values = copy.values(); + assertEquals(Arrays.asList(1f, 3f, 4f), values); + } + { + TermsQueryBuilder builder = new TermsQueryBuilder("foo", new long[]{1, 3, 4}); + TermsQueryBuilder copy = assertSerialization(builder); + List values = copy.values(); + assertEquals(Arrays.asList(1l, 3l, 4l), values); + } + } + + @Test + public void testTermsQueryWithMultipleFields() throws IOException { + String query = XContentFactory.jsonBuilder().startObject() + .startObject("terms").array("foo", 123).array("bar", 456).endObject() + .endObject().string(); + try { + parseQuery(query); + fail("parsing should have failed"); + } catch (ParsingException ex) { + assertThat(ex.getMessage(), equalTo("[terms] query does not support multiple fields")); + } + } +} + diff --git a/core/src/test/java/org/elasticsearch/index/query/TestParsingException.java b/core/src/test/java/org/elasticsearch/index/query/TestParsingException.java deleted file mode 100644 index 0c7a7546b08..00000000000 --- a/core/src/test/java/org/elasticsearch/index/query/TestParsingException.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Licensed to 
Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.index.query; - -import org.elasticsearch.common.ParsingException; -import org.elasticsearch.index.Index; - -/** - * Class used to avoid dragging QueryContext into unit testing framework for - * basic exception handling - */ -public class TestParsingException extends ParsingException { - - public TestParsingException(Index index, int line, int col, String msg, Throwable cause) { - super(index, line, col, msg, cause); - } - - public TestParsingException(Index index, String msg, Throwable cause) { - super(index, UNKNOWN_POSITION, UNKNOWN_POSITION, msg, cause); - } -} diff --git a/core/src/test/java/org/elasticsearch/index/query/TypeQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/TypeQueryBuilderTests.java new file mode 100644 index 00000000000..af5c63c1e2b --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/query/TypeQueryBuilderTests.java @@ -0,0 +1,60 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.query; + +import org.apache.lucene.search.ConstantScoreQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.TermQuery; +import org.elasticsearch.index.mapper.internal.TypeFieldMapper; +import org.junit.Test; + +import java.io.IOException; + +import static org.hamcrest.Matchers.*; + +public class TypeQueryBuilderTests extends AbstractQueryTestCase { + + @Override + protected TypeQueryBuilder doCreateTestQueryBuilder() { + return new TypeQueryBuilder(getRandomType()); + } + + @Override + protected void doAssertLuceneQuery(TypeQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException { + assertThat(query, either(instanceOf(TermQuery.class)).or(instanceOf(ConstantScoreQuery.class))); + if (query instanceof ConstantScoreQuery) { + query = ((ConstantScoreQuery) query).getQuery(); + assertThat(query, instanceOf(TermQuery.class)); + } + TermQuery termQuery = (TermQuery) query; + assertThat(termQuery.getTerm().field(), equalTo(TypeFieldMapper.NAME)); + assertThat(termQuery.getTerm().text(), equalTo(queryBuilder.type())); + } + + @Test + public void testIllegalArgument() { + try { + new TypeQueryBuilder((String) null); + fail("cannot be null"); + } catch (IllegalArgumentException e) { + // expected + } + } +} diff --git 
a/core/src/test/java/org/elasticsearch/index/query/WildcardQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/WildcardQueryBuilderTests.java new file mode 100644 index 00000000000..83f274ec9f1 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/query/WildcardQueryBuilderTests.java @@ -0,0 +1,88 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.query; + +import org.apache.lucene.search.Query; +import org.apache.lucene.search.WildcardQuery; +import org.junit.Test; + +import java.io.IOException; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; + +public class WildcardQueryBuilderTests extends AbstractQueryTestCase { + + @Override + protected WildcardQueryBuilder doCreateTestQueryBuilder() { + WildcardQueryBuilder query; + + // mapped or unmapped field + String text = randomAsciiOfLengthBetween(1, 10); + if (randomBoolean()) { + query = new WildcardQueryBuilder(STRING_FIELD_NAME, text); + } else { + query = new WildcardQueryBuilder(randomAsciiOfLengthBetween(1, 10), text); + } + if (randomBoolean()) { + query.rewrite(randomFrom(getRandomRewriteMethod())); + } + return query; + } + + @Override + protected void doAssertLuceneQuery(WildcardQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException { + assertThat(query, instanceOf(WildcardQuery.class)); + WildcardQuery wildcardQuery = (WildcardQuery) query; + assertThat(wildcardQuery.getField(), equalTo(queryBuilder.fieldName())); + assertThat(wildcardQuery.getTerm().field(), equalTo(queryBuilder.fieldName())); + assertThat(wildcardQuery.getTerm().text(), equalTo(queryBuilder.value())); + } + + @Test + public void testIllegalArguments() { + try { + if (randomBoolean()) { + new WildcardQueryBuilder(null, "text"); + } else { + new WildcardQueryBuilder("", "text"); + } + fail("cannot be null or empty"); + } catch (IllegalArgumentException e) { + // expected + } + + try { + new WildcardQueryBuilder("field", null); + fail("cannot be null or empty"); + } catch (IllegalArgumentException e) { + // expected + } + } + + @Test + public void testEmptyValue() throws IOException { + QueryShardContext context = createShardContext(); + context.setAllowUnmappedFields(true); + + WildcardQueryBuilder wildcardQueryBuilder = new WildcardQueryBuilder(getRandomType(), 
""); + assertEquals(wildcardQueryBuilder.toQuery(context).getClass(), WildcardQuery.class); + } +} diff --git a/core/src/test/java/org/elasticsearch/index/query/WrapperQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/WrapperQueryBuilderTests.java new file mode 100644 index 00000000000..ea04b79a6ef --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/query/WrapperQueryBuilderTests.java @@ -0,0 +1,107 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.query; + +import org.apache.lucene.search.Query; +import org.elasticsearch.action.support.ToXContentToBytes; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParser; +import org.junit.Test; + +import java.io.IOException; + +import static org.hamcrest.Matchers.equalTo; + +public class WrapperQueryBuilderTests extends AbstractQueryTestCase { + + @Override + protected boolean supportsBoostAndQueryName() { + return false; + } + + @Override + protected WrapperQueryBuilder doCreateTestQueryBuilder() { + QueryBuilder wrappedQuery = RandomQueryBuilder.createQuery(random()); + switch (randomInt(2)) { + case 0: + return new WrapperQueryBuilder(wrappedQuery.toString()); + case 1: + return new WrapperQueryBuilder(((ToXContentToBytes)wrappedQuery).buildAsBytes().toBytes()); + case 2: + return new WrapperQueryBuilder(((ToXContentToBytes)wrappedQuery).buildAsBytes()); + default: + throw new UnsupportedOperationException(); + } + } + + @Override + protected void doAssertLuceneQuery(WrapperQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException { + try (XContentParser qSourceParser = XContentFactory.xContent(queryBuilder.source()).createParser(queryBuilder.source())) { + final QueryShardContext contextCopy = new QueryShardContext(context.index(), context.indexQueryParserService()); + contextCopy.reset(qSourceParser); + QueryBuilder innerQuery = contextCopy.parseContext().parseInnerQueryBuilder(); + Query expected = innerQuery.toQuery(context); + assertThat(query, equalTo(expected)); + } + } + + @Override + protected void assertBoost(WrapperQueryBuilder queryBuilder, Query query) throws IOException { + //no-op boost is checked already above as part of doAssertLuceneQuery as we rely on lucene equals impl + } + + @Test + public void 
testIllegalArgument() { + try { + if (randomBoolean()) { + new WrapperQueryBuilder((byte[]) null); + } else { + new WrapperQueryBuilder(new byte[0]); + } + fail("cannot be null or empty"); + } catch (IllegalArgumentException e) { + // expected + } + + try { + if (randomBoolean()) { + new WrapperQueryBuilder((String) null); + } else { + new WrapperQueryBuilder(""); + } + fail("cannot be null or empty"); + } catch (IllegalArgumentException e) { + // expected + } + + try { + if (randomBoolean()) { + new WrapperQueryBuilder((BytesReference) null); + } else { + new WrapperQueryBuilder(new BytesArray(new byte[0])); + } + fail("cannot be null or empty"); + } catch (IllegalArgumentException e) { + // expected + } + } +} diff --git a/core/src/test/java/org/elasticsearch/index/query/functionscore/FieldValueFactorFunctionModifierTests.java b/core/src/test/java/org/elasticsearch/index/query/functionscore/FieldValueFactorFunctionModifierTests.java new file mode 100644 index 00000000000..4c30d14468b --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/query/functionscore/FieldValueFactorFunctionModifierTests.java @@ -0,0 +1,200 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.query.functionscore; + +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.lucene.search.function.FieldValueFactorFunction; +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.equalTo; + +public class FieldValueFactorFunctionModifierTests extends ESTestCase { + + public void testValidOrdinals() { + assertThat(FieldValueFactorFunction.Modifier.NONE.ordinal(), equalTo(0)); + assertThat(FieldValueFactorFunction.Modifier.LOG.ordinal(), equalTo(1)); + assertThat(FieldValueFactorFunction.Modifier.LOG1P.ordinal(), equalTo(2)); + assertThat(FieldValueFactorFunction.Modifier.LOG2P.ordinal(), equalTo(3)); + assertThat(FieldValueFactorFunction.Modifier.LN.ordinal(), equalTo(4)); + assertThat(FieldValueFactorFunction.Modifier.LN1P.ordinal(), equalTo(5)); + assertThat(FieldValueFactorFunction.Modifier.LN2P.ordinal(), equalTo(6)); + assertThat(FieldValueFactorFunction.Modifier.SQUARE.ordinal(), equalTo(7)); + assertThat(FieldValueFactorFunction.Modifier.SQRT.ordinal(), equalTo(8)); + assertThat(FieldValueFactorFunction.Modifier.RECIPROCAL.ordinal(), equalTo(9)); + } + + public void testWriteTo() throws Exception { + try (BytesStreamOutput out = new BytesStreamOutput()) { + FieldValueFactorFunction.Modifier.NONE.writeTo(out); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + assertThat(in.readVInt(), equalTo(0)); + } + } + + try (BytesStreamOutput out = new BytesStreamOutput()) { + FieldValueFactorFunction.Modifier.LOG.writeTo(out); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + assertThat(in.readVInt(), equalTo(1)); + } + } + + try (BytesStreamOutput out = new BytesStreamOutput()) { + FieldValueFactorFunction.Modifier.LOG1P.writeTo(out); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + assertThat(in.readVInt(), equalTo(2)); + } + } + + try (BytesStreamOutput out = new 
BytesStreamOutput()) { + FieldValueFactorFunction.Modifier.LOG2P.writeTo(out); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + assertThat(in.readVInt(), equalTo(3)); + } + } + + try (BytesStreamOutput out = new BytesStreamOutput()) { + FieldValueFactorFunction.Modifier.LN.writeTo(out); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + assertThat(in.readVInt(), equalTo(4)); + } + } + + try (BytesStreamOutput out = new BytesStreamOutput()) { + FieldValueFactorFunction.Modifier.LN1P.writeTo(out); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + assertThat(in.readVInt(), equalTo(5)); + } + } + + try (BytesStreamOutput out = new BytesStreamOutput()) { + FieldValueFactorFunction.Modifier.LN2P.writeTo(out); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + assertThat(in.readVInt(), equalTo(6)); + } + } + + try (BytesStreamOutput out = new BytesStreamOutput()) { + FieldValueFactorFunction.Modifier.SQUARE.writeTo(out); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + assertThat(in.readVInt(), equalTo(7)); + } + } + + try (BytesStreamOutput out = new BytesStreamOutput()) { + FieldValueFactorFunction.Modifier.SQRT.writeTo(out); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + assertThat(in.readVInt(), equalTo(8)); + } + } + + try (BytesStreamOutput out = new BytesStreamOutput()) { + FieldValueFactorFunction.Modifier.RECIPROCAL.writeTo(out); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + assertThat(in.readVInt(), equalTo(9)); + } + } + } + + public void testReadFrom() throws Exception { + try (BytesStreamOutput out = new BytesStreamOutput()) { + out.writeVInt(0); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + assertThat(FieldValueFactorFunction.Modifier.readModifierFrom(in), equalTo(FieldValueFactorFunction.Modifier.NONE)); + } + } + + try (BytesStreamOutput out = new BytesStreamOutput()) { + out.writeVInt(1); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + 
assertThat(FieldValueFactorFunction.Modifier.readModifierFrom(in), equalTo(FieldValueFactorFunction.Modifier.LOG)); + } + } + + try (BytesStreamOutput out = new BytesStreamOutput()) { + out.writeVInt(2); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + assertThat(FieldValueFactorFunction.Modifier.readModifierFrom(in), equalTo(FieldValueFactorFunction.Modifier.LOG1P)); + } + } + + try (BytesStreamOutput out = new BytesStreamOutput()) { + out.writeVInt(3); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + assertThat(FieldValueFactorFunction.Modifier.readModifierFrom(in), equalTo(FieldValueFactorFunction.Modifier.LOG2P)); + } + } + + try (BytesStreamOutput out = new BytesStreamOutput()) { + out.writeVInt(4); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + assertThat(FieldValueFactorFunction.Modifier.readModifierFrom(in), equalTo(FieldValueFactorFunction.Modifier.LN)); + } + } + + try (BytesStreamOutput out = new BytesStreamOutput()) { + out.writeVInt(5); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + assertThat(FieldValueFactorFunction.Modifier.readModifierFrom(in), equalTo(FieldValueFactorFunction.Modifier.LN1P)); + } + } + + try (BytesStreamOutput out = new BytesStreamOutput()) { + out.writeVInt(6); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + assertThat(FieldValueFactorFunction.Modifier.readModifierFrom(in), equalTo(FieldValueFactorFunction.Modifier.LN2P)); + } + } + + try (BytesStreamOutput out = new BytesStreamOutput()) { + out.writeVInt(7); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + assertThat(FieldValueFactorFunction.Modifier.readModifierFrom(in), equalTo(FieldValueFactorFunction.Modifier.SQUARE)); + } + } + + try (BytesStreamOutput out = new BytesStreamOutput()) { + out.writeVInt(8); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + assertThat(FieldValueFactorFunction.Modifier.readModifierFrom(in), equalTo(FieldValueFactorFunction.Modifier.SQRT)); + } + } + + try 
(BytesStreamOutput out = new BytesStreamOutput()) { + out.writeVInt(9); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + assertThat(FieldValueFactorFunction.Modifier.readModifierFrom(in), equalTo(FieldValueFactorFunction.Modifier.RECIPROCAL)); + } + } + } + + public void testFromString() { + assertThat(FieldValueFactorFunction.Modifier.fromString("none"), equalTo(FieldValueFactorFunction.Modifier.NONE)); + assertThat(FieldValueFactorFunction.Modifier.fromString("log"), equalTo(FieldValueFactorFunction.Modifier.LOG)); + assertThat(FieldValueFactorFunction.Modifier.fromString("log1p"), equalTo(FieldValueFactorFunction.Modifier.LOG1P)); + assertThat(FieldValueFactorFunction.Modifier.fromString("log2p"), equalTo(FieldValueFactorFunction.Modifier.LOG2P)); + assertThat(FieldValueFactorFunction.Modifier.fromString("ln"), equalTo(FieldValueFactorFunction.Modifier.LN)); + assertThat(FieldValueFactorFunction.Modifier.fromString("ln1p"), equalTo(FieldValueFactorFunction.Modifier.LN1P)); + assertThat(FieldValueFactorFunction.Modifier.fromString("ln2p"), equalTo(FieldValueFactorFunction.Modifier.LN2P)); + assertThat(FieldValueFactorFunction.Modifier.fromString("square"), equalTo(FieldValueFactorFunction.Modifier.SQUARE)); + assertThat(FieldValueFactorFunction.Modifier.fromString("sqrt"), equalTo(FieldValueFactorFunction.Modifier.SQRT)); + assertThat(FieldValueFactorFunction.Modifier.fromString("reciprocal"), equalTo(FieldValueFactorFunction.Modifier.RECIPROCAL)); + } +} diff --git a/core/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilderTests.java new file mode 100644 index 00000000000..58193ab40d3 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilderTests.java @@ -0,0 +1,596 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.query.functionscore; + +import com.fasterxml.jackson.core.JsonParseException; +import org.apache.lucene.index.Term; +import org.apache.lucene.search.MatchAllDocsQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.TermQuery; +import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.lucene.search.function.*; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.query.*; +import org.elasticsearch.index.query.functionscore.exp.ExponentialDecayFunctionBuilder; +import org.elasticsearch.index.query.functionscore.fieldvaluefactor.FieldValueFactorFunctionBuilder; +import org.elasticsearch.index.query.functionscore.gauss.GaussDecayFunctionBuilder; +import org.elasticsearch.index.query.functionscore.lin.LinearDecayFunctionBuilder; +import org.elasticsearch.index.query.functionscore.random.RandomScoreFunctionBuilder; +import org.elasticsearch.index.query.functionscore.script.ScriptScoreFunctionBuilder; +import org.elasticsearch.index.query.functionscore.weight.WeightBuilder; +import org.elasticsearch.script.MockScriptEngine; +import org.elasticsearch.script.Script; +import org.elasticsearch.script.ScriptService; +import 
org.elasticsearch.search.MultiValueMode; +import org.junit.Test; + +import java.io.IOException; +import java.util.Collections; +import java.util.Map; + +import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.elasticsearch.index.query.QueryBuilders.functionScoreQuery; +import static org.elasticsearch.index.query.QueryBuilders.termQuery; +import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath; +import static org.hamcrest.Matchers.*; + +public class FunctionScoreQueryBuilderTests extends AbstractQueryTestCase { + + @Override + protected FunctionScoreQueryBuilder doCreateTestQueryBuilder() { + FunctionScoreQueryBuilder functionScoreQueryBuilder; + switch(randomIntBetween(0, 3)) { + case 0: + int numFunctions = randomIntBetween(0, 3); + FunctionScoreQueryBuilder.FilterFunctionBuilder[] filterFunctionBuilders = new FunctionScoreQueryBuilder.FilterFunctionBuilder[numFunctions]; + for (int i = 0; i < numFunctions; i++) { + filterFunctionBuilders[i] = new FunctionScoreQueryBuilder.FilterFunctionBuilder(RandomQueryBuilder.createQuery(random()), randomScoreFunction()); + } + if (randomBoolean()) { + functionScoreQueryBuilder = new FunctionScoreQueryBuilder(RandomQueryBuilder.createQuery(random()), filterFunctionBuilders); + } else { + functionScoreQueryBuilder = new FunctionScoreQueryBuilder(filterFunctionBuilders); + } + break; + case 1: + functionScoreQueryBuilder = new FunctionScoreQueryBuilder(randomScoreFunction()); + break; + case 2: + functionScoreQueryBuilder = new FunctionScoreQueryBuilder(RandomQueryBuilder.createQuery(random()), randomScoreFunction()); + break; + case 3: + functionScoreQueryBuilder = new FunctionScoreQueryBuilder(RandomQueryBuilder.createQuery(random())); + break; + default: + throw new UnsupportedOperationException(); + } + + if (randomBoolean()) { + functionScoreQueryBuilder.boostMode(randomFrom(CombineFunction.values())); + } + if (randomBoolean()) { + 
functionScoreQueryBuilder.scoreMode(randomFrom(FiltersFunctionScoreQuery.ScoreMode.values())); + } + if (randomBoolean()) { + functionScoreQueryBuilder.maxBoost(randomFloat()); + } + if (randomBoolean()) { + functionScoreQueryBuilder.setMinScore(randomFloat()); + } + return functionScoreQueryBuilder; + } + + private static ScoreFunctionBuilder randomScoreFunction() { + if (randomBoolean()) { + return new WeightBuilder().setWeight(randomFloat()); + } + ScoreFunctionBuilder functionBuilder; + //TODO random score function is temporarily disabled, it causes NPE in testToQuery when trying to access the shardId through SearchContext + switch (randomIntBetween(0, 2)) { + case 0: + DecayFunctionBuilder decayFunctionBuilder; + Float offset = randomBoolean() ? null : randomFloat(); + double decay = randomDouble(); + switch(randomIntBetween(0, 2)) { + case 0: + decayFunctionBuilder = new GaussDecayFunctionBuilder(INT_FIELD_NAME, randomFloat(), randomFloat(), offset, decay); + break; + case 1: + decayFunctionBuilder = new ExponentialDecayFunctionBuilder(INT_FIELD_NAME, randomFloat(), randomFloat(), offset, decay); + break; + case 2: + decayFunctionBuilder = new LinearDecayFunctionBuilder(INT_FIELD_NAME, randomFloat(), randomFloat(), offset, decay); + break; + default: + throw new UnsupportedOperationException(); + } + if (randomBoolean()) { + decayFunctionBuilder.setMultiValueMode(randomFrom(MultiValueMode.values())); + } + functionBuilder = decayFunctionBuilder; + break; + case 1: + FieldValueFactorFunctionBuilder fieldValueFactorFunctionBuilder = new FieldValueFactorFunctionBuilder(INT_FIELD_NAME); + if (randomBoolean()) { + fieldValueFactorFunctionBuilder.factor(randomFloat()); + } + if (randomBoolean()) { + fieldValueFactorFunctionBuilder.missing(randomDouble()); + } + if (randomBoolean()) { + fieldValueFactorFunctionBuilder.modifier(randomFrom(FieldValueFactorFunction.Modifier.values())); + } + functionBuilder = fieldValueFactorFunctionBuilder; + break; + case 2: + String 
script = "5"; + Map params = Collections.emptyMap(); + functionBuilder = new ScriptScoreFunctionBuilder(new Script(script, ScriptService.ScriptType.INLINE, MockScriptEngine.NAME, params)); + break; + case 3: + RandomScoreFunctionBuilder randomScoreFunctionBuilder = new RandomScoreFunctionBuilder(); + if (randomBoolean()) { + randomScoreFunctionBuilder.seed(randomLong()); + } else if(randomBoolean()) { + randomScoreFunctionBuilder.seed(randomInt()); + } else { + randomScoreFunctionBuilder.seed(randomAsciiOfLengthBetween(1, 10)); + } + functionBuilder = randomScoreFunctionBuilder; + break; + default: + throw new UnsupportedOperationException(); + } + if (randomBoolean()) { + functionBuilder.setWeight(randomFloat()); + } + return functionBuilder; + } + + @Override + protected void doAssertLuceneQuery(FunctionScoreQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException { + assertThat(query, either(instanceOf(FunctionScoreQuery.class)).or(instanceOf(FiltersFunctionScoreQuery.class))); + } + + /** + * Overridden here to ensure the test is only run if at least one type is + * present in the mappings. 
Functions require the field to be + * explicitly mapped + */ + @Override + public void testToQuery() throws IOException { + assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); + super.testToQuery(); + } + + @Test + public void testIllegalArguments() { + try { + new FunctionScoreQueryBuilder((QueryBuilder)null); + fail("must not be null"); + } catch(IllegalArgumentException e) { + //all good + } + + try { + new FunctionScoreQueryBuilder((ScoreFunctionBuilder)null); + fail("must not be null"); + } catch(IllegalArgumentException e) { + //all good + } + + try { + new FunctionScoreQueryBuilder((FunctionScoreQueryBuilder.FilterFunctionBuilder[])null); + fail("must not be null"); + } catch(IllegalArgumentException e) { + //all good + } + + try { + new FunctionScoreQueryBuilder(null, ScoreFunctionBuilders.randomFunction(123)); + fail("must not be null"); + } catch(IllegalArgumentException e) { + //all good + } + + try { + new FunctionScoreQueryBuilder(new MatchAllQueryBuilder(), (ScoreFunctionBuilder)null); + fail("must not be null"); + } catch(IllegalArgumentException e) { + //all good + } + + try { + new FunctionScoreQueryBuilder(new MatchAllQueryBuilder(), (FunctionScoreQueryBuilder.FilterFunctionBuilder[])null); + fail("must not be null"); + } catch(IllegalArgumentException e) { + //all good + } + + try { + new FunctionScoreQueryBuilder(null, new FunctionScoreQueryBuilder.FilterFunctionBuilder[0]); + fail("must not be null"); + } catch(IllegalArgumentException e) { + //all good + } + + try { + new FunctionScoreQueryBuilder(QueryBuilders.matchAllQuery(), new FunctionScoreQueryBuilder.FilterFunctionBuilder[]{null}); + fail("content of array must not be null"); + } catch(IllegalArgumentException e) { + //all good + } + + try { + new FunctionScoreQueryBuilder.FilterFunctionBuilder(null); + fail("must not be null"); + } catch(IllegalArgumentException e) { + //all good + } + + try { + new 
FunctionScoreQueryBuilder.FilterFunctionBuilder(null, ScoreFunctionBuilders.randomFunction(123)); + fail("must not be null"); + } catch(IllegalArgumentException e) { + //all good + } + + try { + new FunctionScoreQueryBuilder.FilterFunctionBuilder(new MatchAllQueryBuilder(), null); + fail("must not be null"); + } catch(IllegalArgumentException e) { + //all good + } + + try { + new FunctionScoreQueryBuilder(new MatchAllQueryBuilder()).scoreMode(null); + fail("must not be null"); + } catch(IllegalArgumentException e) { + //all good + } + + try { + new FunctionScoreQueryBuilder(new MatchAllQueryBuilder()).boostMode(null); + fail("must not be null"); + } catch(IllegalArgumentException e) { + //all good + } + } + + @Test + public void testParseFunctionsArray() throws IOException { + String functionScoreQuery = "{\n" + + " \"function_score\":{\n" + + " \"query\":{\n" + + " \"term\":{\n" + + " \"field1\":\"value1\"\n" + + " }\n" + + " },\n" + + " \"functions\": [\n" + + " {\n" + + " \"random_score\": {\n" + + " \"seed\":123456\n" + + " },\n" + + " \"weight\": 3,\n" + + " \"filter\": {\n" + + " \"term\":{\n" + + " \"field2\":\"value2\"\n" + + " }\n" + + " }\n" + + " },\n" + + " {\n" + + " \"filter\": {\n" + + " \"term\":{\n" + + " \"field3\":\"value3\"\n" + + " }\n" + + " },\n" + + " \"weight\": 9\n" + + " },\n" + + " {\n" + + " \"gauss\": {\n" + + " \"field_name\": {\n" + + " \"origin\":0.5,\n" + + " \"scale\":0.6\n" + + " }\n" + + " }\n" + + " }\n" + + " ],\n" + + " \"boost\" : 3,\n" + + " \"score_mode\" : \"avg\",\n" + + " \"boost_mode\" : \"replace\",\n" + + " \"max_boost\" : 10\n" + + " }\n" + + "}"; + + QueryBuilder queryBuilder = parseQuery(functionScoreQuery); + //given that we copy part of the decay functions as bytes, we test that fromXContent and toXContent both work no matter what the initial format was + for (int i = 0; i <= XContentType.values().length; i++) { + assertThat(queryBuilder, instanceOf(FunctionScoreQueryBuilder.class)); + FunctionScoreQueryBuilder 
functionScoreQueryBuilder = (FunctionScoreQueryBuilder) queryBuilder; + assertThat(functionScoreQueryBuilder.query(), instanceOf(TermQueryBuilder.class)); + TermQueryBuilder termQueryBuilder = (TermQueryBuilder) functionScoreQueryBuilder.query(); + assertThat(termQueryBuilder.fieldName(), equalTo("field1")); + assertThat(termQueryBuilder.value(), equalTo("value1")); + assertThat(functionScoreQueryBuilder.filterFunctionBuilders().length, equalTo(3)); + assertThat(functionScoreQueryBuilder.filterFunctionBuilders()[0].getFilter(), instanceOf(TermQueryBuilder.class)); + termQueryBuilder = (TermQueryBuilder) functionScoreQueryBuilder.filterFunctionBuilders()[0].getFilter(); + assertThat(termQueryBuilder.fieldName(), equalTo("field2")); + assertThat(termQueryBuilder.value(), equalTo("value2")); + assertThat(functionScoreQueryBuilder.filterFunctionBuilders()[1].getFilter(), instanceOf(TermQueryBuilder.class)); + termQueryBuilder = (TermQueryBuilder) functionScoreQueryBuilder.filterFunctionBuilders()[1].getFilter(); + assertThat(termQueryBuilder.fieldName(), equalTo("field3")); + assertThat(termQueryBuilder.value(), equalTo("value3")); + assertThat(functionScoreQueryBuilder.filterFunctionBuilders()[2].getFilter(), instanceOf(MatchAllQueryBuilder.class)); + assertThat(functionScoreQueryBuilder.filterFunctionBuilders()[0].getScoreFunction(), instanceOf(RandomScoreFunctionBuilder.class)); + RandomScoreFunctionBuilder randomScoreFunctionBuilder = (RandomScoreFunctionBuilder) functionScoreQueryBuilder.filterFunctionBuilders()[0].getScoreFunction(); + assertThat(randomScoreFunctionBuilder.getSeed(), equalTo(123456)); + assertThat(randomScoreFunctionBuilder.getWeight(), equalTo(3f)); + assertThat(functionScoreQueryBuilder.filterFunctionBuilders()[1].getScoreFunction(), instanceOf(WeightBuilder.class)); + WeightBuilder weightBuilder = (WeightBuilder) functionScoreQueryBuilder.filterFunctionBuilders()[1].getScoreFunction(); + assertThat(weightBuilder.getWeight(), equalTo(9f)); + 
assertThat(functionScoreQueryBuilder.filterFunctionBuilders()[2].getScoreFunction(), instanceOf(GaussDecayFunctionBuilder.class)); + GaussDecayFunctionBuilder gaussDecayFunctionBuilder = (GaussDecayFunctionBuilder) functionScoreQueryBuilder.filterFunctionBuilders()[2].getScoreFunction(); + assertThat(gaussDecayFunctionBuilder.getFieldName(), equalTo("field_name")); + assertThat(functionScoreQueryBuilder.boost(), equalTo(3f)); + assertThat(functionScoreQueryBuilder.scoreMode(), equalTo(FiltersFunctionScoreQuery.ScoreMode.AVG)); + assertThat(functionScoreQueryBuilder.boostMode(), equalTo(CombineFunction.REPLACE)); + assertThat(functionScoreQueryBuilder.maxBoost(), equalTo(10f)); + + if (i < XContentType.values().length) { + queryBuilder = parseQuery(((AbstractQueryBuilder)queryBuilder).buildAsBytes(XContentType.values()[i])); + } + } + } + + @Test + public void testParseSingleFunction() throws IOException { + String functionScoreQuery = "{\n" + + " \"function_score\":{\n" + + " \"query\":{\n" + + " \"term\":{\n" + + " \"field1\":\"value1\"\n" + + " }\n" + + " },\n" + + " \"gauss\": {\n" + + " \"field_name\": {\n" + + " \"origin\":0.5,\n" + + " \"scale\":0.6\n" + + " }\n" + + " },\n" + + " \"boost\" : 3,\n" + + " \"score_mode\" : \"avg\",\n" + + " \"boost_mode\" : \"replace\",\n" + + " \"max_boost\" : 10\n" + + " }\n" + + "}"; + + QueryBuilder queryBuilder = parseQuery(functionScoreQuery); + //given that we copy part of the decay functions as bytes, we test that fromXContent and toXContent both work no matter what the initial format was + for (int i = 0; i <= XContentType.values().length; i++) { + assertThat(queryBuilder, instanceOf(FunctionScoreQueryBuilder.class)); + FunctionScoreQueryBuilder functionScoreQueryBuilder = (FunctionScoreQueryBuilder) queryBuilder; + assertThat(functionScoreQueryBuilder.query(), instanceOf(TermQueryBuilder.class)); + TermQueryBuilder termQueryBuilder = (TermQueryBuilder) functionScoreQueryBuilder.query(); + 
assertThat(termQueryBuilder.fieldName(), equalTo("field1")); + assertThat(termQueryBuilder.value(), equalTo("value1")); + assertThat(functionScoreQueryBuilder.filterFunctionBuilders().length, equalTo(1)); + assertThat(functionScoreQueryBuilder.filterFunctionBuilders()[0].getFilter(), instanceOf(MatchAllQueryBuilder.class)); + assertThat(functionScoreQueryBuilder.filterFunctionBuilders()[0].getScoreFunction(), instanceOf(GaussDecayFunctionBuilder.class)); + GaussDecayFunctionBuilder gaussDecayFunctionBuilder = (GaussDecayFunctionBuilder) functionScoreQueryBuilder.filterFunctionBuilders()[0].getScoreFunction(); + assertThat(gaussDecayFunctionBuilder.getFieldName(), equalTo("field_name")); + assertThat(gaussDecayFunctionBuilder.getWeight(), nullValue()); + assertThat(functionScoreQueryBuilder.boost(), equalTo(3f)); + assertThat(functionScoreQueryBuilder.scoreMode(), equalTo(FiltersFunctionScoreQuery.ScoreMode.AVG)); + assertThat(functionScoreQueryBuilder.boostMode(), equalTo(CombineFunction.REPLACE)); + assertThat(functionScoreQueryBuilder.maxBoost(), equalTo(10f)); + + if (i < XContentType.values().length) { + queryBuilder = parseQuery(((AbstractQueryBuilder)queryBuilder).buildAsBytes(XContentType.values()[i])); + } + } + } + + @Test + public void testProperErrorMessageWhenTwoFunctionsDefinedInQueryBody() throws IOException { + //without a functions array, we support only a single function, weight can't be associated with the function either. 
+ String functionScoreQuery = "{\n" + + " \"function_score\": {\n" + + " \"script_score\": {\n" + + " \"script\": \"5\"\n" + + " },\n" + + " \"weight\": 2\n" + + " }\n" + + "}"; + try { + parseQuery(functionScoreQuery); + fail("parsing should have failed"); + } catch(ParsingException e) { + assertThat(e.getMessage(), containsString("use [functions] array if you want to define several functions.")); + } + } + + @Test + public void testProperErrorMessageWhenTwoFunctionsDefinedInFunctionsArray() throws IOException { + String functionScoreQuery = "{\n" + + " \"function_score\":{\n" + + " \"functions\": [\n" + + " {\n" + + " \"random_score\": {\n" + + " \"seed\":123456\n" + + " },\n" + + " \"weight\": 3,\n" + + " \"script_score\": {\n" + + " \"script\": \"_index['text']['foo'].tf()\"\n" + + " },\n" + + " \"filter\": {\n" + + " \"term\":{\n" + + " \"field2\":\"value2\"\n" + + " }\n" + + " }\n" + + " }\n" + + " ]\n" + + " }\n" + + "}"; + + try { + parseQuery(functionScoreQuery); + fail("parsing should have failed"); + } catch(ParsingException e) { + assertThat(e.getMessage(), containsString("failed to parse function_score functions. 
already found [random_score], now encountering [script_score].")); + } + } + + @Test + public void testProperErrorMessageWhenMissingFunction() throws IOException { + String functionScoreQuery = "{\n" + + " \"function_score\":{\n" + + " \"functions\": [\n" + + " {\n" + + " \"filter\": {\n" + + " \"term\":{\n" + + " \"field2\":\"value2\"\n" + + " }\n" + + " }\n" + + " }\n" + + " ]\n" + + " }\n" + + "}"; + try { + parseQuery(functionScoreQuery); + fail("parsing should have failed"); + } catch(ParsingException e) { + assertThat(e.getMessage(), containsString("an entry in functions list is missing a function.")); + } + } + + @Test + public void testWeight1fStillProducesWeightFunction() throws IOException { + assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); + String queryString = jsonBuilder().startObject() + .startObject("function_score") + .startArray("functions") + .startObject() + .startObject("field_value_factor") + .field("field", INT_FIELD_NAME) + .endObject() + .field("weight", 1.0) + .endObject() + .endArray() + .endObject() + .endObject().string(); + QueryBuilder query = parseQuery(queryString); + assertThat(query, instanceOf(FunctionScoreQueryBuilder.class)); + FunctionScoreQueryBuilder functionScoreQueryBuilder = (FunctionScoreQueryBuilder) query; + assertThat(functionScoreQueryBuilder.filterFunctionBuilders()[0].getScoreFunction(), instanceOf(FieldValueFactorFunctionBuilder.class)); + FieldValueFactorFunctionBuilder fieldValueFactorFunctionBuilder = (FieldValueFactorFunctionBuilder) functionScoreQueryBuilder.filterFunctionBuilders()[0].getScoreFunction(); + assertThat(fieldValueFactorFunctionBuilder.fieldName(), equalTo(INT_FIELD_NAME)); + assertThat(fieldValueFactorFunctionBuilder.factor(), equalTo(FieldValueFactorFunctionBuilder.DEFAULT_FACTOR)); + assertThat(fieldValueFactorFunctionBuilder.modifier(), equalTo(FieldValueFactorFunctionBuilder.DEFAULT_MODIFIER)); + 
assertThat(fieldValueFactorFunctionBuilder.getWeight(), equalTo(1f)); + assertThat(fieldValueFactorFunctionBuilder.missing(), nullValue()); + + Query luceneQuery = query.toQuery(createShardContext()); + assertThat(luceneQuery, instanceOf(FunctionScoreQuery.class)); + FunctionScoreQuery functionScoreQuery = (FunctionScoreQuery) luceneQuery; + assertThat(functionScoreQuery.getFunction(), instanceOf(WeightFactorFunction.class)); + WeightFactorFunction weightFactorFunction = (WeightFactorFunction) functionScoreQuery.getFunction(); + assertThat(weightFactorFunction.getWeight(), equalTo(1.0f)); + assertThat(weightFactorFunction.getScoreFunction(), instanceOf(FieldValueFactorFunction.class)); + } + + @Test + public void testProperErrorMessagesForMisplacedWeightsAndFunctions() throws IOException { + String query = jsonBuilder().startObject().startObject("function_score") + .startArray("functions") + .startObject().startObject("script_score").field("script", "3").endObject().endObject() + .endArray() + .field("weight", 2) + .endObject().endObject().string(); + try { + parseQuery(query); + fail("Expect exception here because array of functions and one weight in body is not allowed."); + } catch (ParsingException e) { + assertThat(e.getMessage(), containsString("you can either define [functions] array or a single function, not both. already found [functions] array, now encountering [weight].")); + } + query = jsonBuilder().startObject().startObject("function_score") + .field("weight", 2) + .startArray("functions") + .startObject().endObject() + .endArray() + .endObject().endObject().string(); + try { + parseQuery(query); + fail("Expect exception here because array of functions and one weight in body is not allowed."); + } catch (ParsingException e) { + assertThat(e.getMessage(), containsString("you can either define [functions] array or a single function, not both. 
already found [weight], now encountering [functions].")); + } + } + + @Test(expected = JsonParseException.class) + public void ensureMalformedThrowsException() throws IOException { + parseQuery(copyToStringFromClasspath("/org/elasticsearch/index/query/faulty-function-score-query.json")); + } + + @Test + public void testCustomWeightFactorQueryBuilder_withFunctionScore() throws IOException { + Query parsedQuery = parseQuery(functionScoreQuery(termQuery("name.last", "banon"), ScoreFunctionBuilders.weightFactorFunction(1.3f)).buildAsBytes()).toQuery(createShardContext()); + assertThat(parsedQuery, instanceOf(FunctionScoreQuery.class)); + FunctionScoreQuery functionScoreQuery = (FunctionScoreQuery) parsedQuery; + assertThat(((TermQuery) functionScoreQuery.getSubQuery()).getTerm(), equalTo(new Term("name.last", "banon"))); + assertThat((double) ((WeightFactorFunction) functionScoreQuery.getFunction()).getWeight(), closeTo(1.3, 0.001)); + } + + @Test + public void testCustomWeightFactorQueryBuilder_withFunctionScoreWithoutQueryGiven() throws IOException { + Query parsedQuery = parseQuery(functionScoreQuery(ScoreFunctionBuilders.weightFactorFunction(1.3f)).buildAsBytes()).toQuery(createShardContext()); + assertThat(parsedQuery, instanceOf(FunctionScoreQuery.class)); + FunctionScoreQuery functionScoreQuery = (FunctionScoreQuery) parsedQuery; + assertThat(functionScoreQuery.getSubQuery() instanceof MatchAllDocsQuery, equalTo(true)); + assertThat((double) ((WeightFactorFunction) functionScoreQuery.getFunction()).getWeight(), closeTo(1.3, 0.001)); + } + + @Test + public void testFieldValueFactorFactorArray() throws IOException { + // don't permit an array of factors + String querySource = "{" + + "\"query\": {" + + " \"function_score\": {" + + " \"query\": {" + + " \"match\": {\"name\": \"foo\"}" + + " }," + + " \"functions\": [" + + " {" + + " \"field_value_factor\": {" + + " \"field\": \"test\"," + + " \"factor\": [1.2,2]" + + " }" + + " }" + + " ]" + + " }" + + " }" + + 
"}"; + try { + parseQuery(querySource); + fail("parsing should have failed"); + } catch(ParsingException e) { + assertThat(e.getMessage(), containsString("[field_value_factor] field 'factor' does not support lists or objects")); + } + } +} diff --git a/core/src/test/java/org/elasticsearch/index/query/functionscore/ScoreFunctionBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/functionscore/ScoreFunctionBuilderTests.java new file mode 100644 index 00000000000..30c2944a5a0 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/query/functionscore/ScoreFunctionBuilderTests.java @@ -0,0 +1,146 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.query.functionscore; + +import org.elasticsearch.index.query.functionscore.exp.ExponentialDecayFunctionBuilder; +import org.elasticsearch.index.query.functionscore.fieldvaluefactor.FieldValueFactorFunctionBuilder; +import org.elasticsearch.index.query.functionscore.gauss.GaussDecayFunctionBuilder; +import org.elasticsearch.index.query.functionscore.lin.LinearDecayFunctionBuilder; +import org.elasticsearch.index.query.functionscore.random.RandomScoreFunctionBuilder; +import org.elasticsearch.index.query.functionscore.script.ScriptScoreFunctionBuilder; +import org.elasticsearch.test.ESTestCase; + +public class ScoreFunctionBuilderTests extends ESTestCase { + + public void testIllegalArguments() { + try { + new RandomScoreFunctionBuilder().seed(null); + fail("must not be null"); + } catch(IllegalArgumentException e) { + //all good + } + + try { + new ScriptScoreFunctionBuilder(null); + fail("must not be null"); + } catch(IllegalArgumentException e) { + //all good + } + + try { + new FieldValueFactorFunctionBuilder(null); + fail("must not be null"); + } catch(IllegalArgumentException e) { + //all good + } + + try { + new FieldValueFactorFunctionBuilder("").modifier(null); + fail("must not be null"); + } catch(IllegalArgumentException e) { + //all good + } + + try { + new GaussDecayFunctionBuilder(null, "", "", ""); + fail("must not be null"); + } catch(IllegalArgumentException e) { + //all good + } + + try { + new GaussDecayFunctionBuilder("", "", null, ""); + fail("must not be null"); + } catch(IllegalArgumentException e) { + //all good + } + + try { + new GaussDecayFunctionBuilder("", "", null, "", randomIntBetween(1, 100)); + fail("must not be null"); + } catch(IllegalArgumentException e) { + //all good + } + + try { + new GaussDecayFunctionBuilder("", "", null, "", randomIntBetween(-100, -1)); + fail("must not be null"); + } catch(IllegalArgumentException e) { + //all good + } + + try { + new LinearDecayFunctionBuilder(null, 
"", "", ""); + fail("must not be null"); + } catch(IllegalArgumentException e) { + //all good + } + + try { + new LinearDecayFunctionBuilder("", "", null, ""); + fail("must not be null"); + } catch(IllegalArgumentException e) { + //all good + } + + try { + new LinearDecayFunctionBuilder("", "", null, "", randomIntBetween(1, 100)); + fail("must not be null"); + } catch(IllegalArgumentException e) { + //all good + } + + try { + new LinearDecayFunctionBuilder("", "", null, "", randomIntBetween(-100, -1)); + fail("must not be null"); + } catch(IllegalArgumentException e) { + //all good + } + + try { + new ExponentialDecayFunctionBuilder(null, "", "", ""); + fail("must not be null"); + } catch(IllegalArgumentException e) { + //all good + } + + try { + new ExponentialDecayFunctionBuilder("", "", null, ""); + fail("must not be null"); + } catch(IllegalArgumentException e) { + //all good + } + + try { + new ExponentialDecayFunctionBuilder("", "", null, "", randomIntBetween(1, 100)); + fail("must not be null"); + } catch(IllegalArgumentException e) { + //all good + } + + try { + new ExponentialDecayFunctionBuilder("", "", null, "", randomIntBetween(-100, -1)); + fail("must not be null"); + } catch(IllegalArgumentException e) { + //all good + } + + } +} diff --git a/core/src/test/java/org/elasticsearch/index/query/plugin/CustomQueryParserIT.java b/core/src/test/java/org/elasticsearch/index/query/plugin/CustomQueryParserIT.java index 1f0743abd96..c015052ffbb 100644 --- a/core/src/test/java/org/elasticsearch/index/query/plugin/CustomQueryParserIT.java +++ b/core/src/test/java/org/elasticsearch/index/query/plugin/CustomQueryParserIT.java @@ -23,7 +23,6 @@ import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.Query; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.query.BoolQueryBuilder; import 
org.elasticsearch.index.query.IndexQueryParserService; import org.elasticsearch.indices.IndicesService; @@ -78,7 +77,7 @@ public class CustomQueryParserIT extends ESIntegTestCase { @Test //see #11120 public void testConstantScoreParsesFilter() throws Exception { IndexQueryParserService queryParser = queryParser(); - Query q = queryParser.parse(constantScoreQuery(new DummyQueryParserPlugin.DummyQueryBuilder())).query(); + Query q = constantScoreQuery(new DummyQueryParserPlugin.DummyQueryBuilder()).toQuery(queryParser.getShardContext()); Query inner = ((ConstantScoreQuery) q).getQuery(); assertThat(inner, instanceOf(DummyQueryParserPlugin.DummyQuery.class)); assertEquals(true, ((DummyQueryParserPlugin.DummyQuery) inner).isFilter); @@ -88,11 +87,11 @@ public class CustomQueryParserIT extends ESIntegTestCase { public void testBooleanParsesFilter() throws Exception { IndexQueryParserService queryParser = queryParser(); // single clause, serialized as inner object - Query q = queryParser.parse(boolQuery() + Query q = boolQuery() .should(new DummyQueryParserPlugin.DummyQueryBuilder()) .must(new DummyQueryParserPlugin.DummyQueryBuilder()) .filter(new DummyQueryParserPlugin.DummyQueryBuilder()) - .mustNot(new DummyQueryParserPlugin.DummyQueryBuilder())).query(); + .mustNot(new DummyQueryParserPlugin.DummyQueryBuilder()).toQuery(queryParser.getShardContext()); assertThat(q, instanceOf(BooleanQuery.class)); BooleanQuery bq = (BooleanQuery) q; assertEquals(4, bq.clauses().size()); @@ -113,11 +112,11 @@ public class CustomQueryParserIT extends ESIntegTestCase { } // multiple clauses, serialized as inner arrays - q = queryParser.parse(boolQuery() + q = boolQuery() .should(new DummyQueryParserPlugin.DummyQueryBuilder()).should(new DummyQueryParserPlugin.DummyQueryBuilder()) .must(new DummyQueryParserPlugin.DummyQueryBuilder()).must(new DummyQueryParserPlugin.DummyQueryBuilder()) .filter(new DummyQueryParserPlugin.DummyQueryBuilder()).filter(new 
DummyQueryParserPlugin.DummyQueryBuilder()) - .mustNot(new DummyQueryParserPlugin.DummyQueryBuilder()).mustNot(new DummyQueryParserPlugin.DummyQueryBuilder())).query(); + .mustNot(new DummyQueryParserPlugin.DummyQueryBuilder()).mustNot(new DummyQueryParserPlugin.DummyQueryBuilder()).toQuery(queryParser.getShardContext()); assertThat(q, instanceOf(BooleanQuery.class)); bq = (BooleanQuery) q; assertEquals(8, bq.clauses().size()); diff --git a/core/src/test/java/org/elasticsearch/index/query/plugin/DummyQueryParserPlugin.java b/core/src/test/java/org/elasticsearch/index/query/plugin/DummyQueryParserPlugin.java index 0ad8b53f9b4..432c833aef2 100644 --- a/core/src/test/java/org/elasticsearch/index/query/plugin/DummyQueryParserPlugin.java +++ b/core/src/test/java/org/elasticsearch/index/query/plugin/DummyQueryParserPlugin.java @@ -23,12 +23,11 @@ import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.Weight; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.index.query.QueryParseContext; -import org.elasticsearch.index.query.QueryParser; -import org.elasticsearch.common.ParsingException; +import org.elasticsearch.index.query.*; import org.elasticsearch.indices.IndicesModule; import org.elasticsearch.plugins.Plugin; @@ -50,24 +49,61 @@ public class DummyQueryParserPlugin extends Plugin { module.registerQueryParser(DummyQueryParser.class); } - public static class DummyQueryBuilder extends QueryBuilder { + public static class DummyQueryBuilder extends AbstractQueryBuilder { + private static final String NAME = "dummy"; + @Override protected void doXContent(XContentBuilder builder, Params params) throws 
IOException { - builder.startObject("dummy").endObject(); + builder.startObject(NAME).endObject(); + } + + @Override + protected Query doToQuery(QueryShardContext context) throws IOException { + return new DummyQuery(context.isFilter()); + } + + @Override + protected DummyQueryBuilder doReadFrom(StreamInput in) throws IOException { + return null; + } + + @Override + protected void doWriteTo(StreamOutput out) throws IOException { + + } + + @Override + protected boolean doEquals(DummyQueryBuilder other) { + return false; + } + + @Override + protected int doHashCode() { + return 0; + } + + @Override + public String getWriteableName() { + return NAME; } } - public static class DummyQueryParser implements QueryParser { + public static class DummyQueryParser implements QueryParser { @Override public String[] names() { - return new String[]{"dummy"}; + return new String[]{DummyQueryBuilder.NAME}; } @Override - public Query parse(QueryParseContext parseContext) throws IOException, ParsingException { + public DummyQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException { XContentParser.Token token = parseContext.parser().nextToken(); assert token == XContentParser.Token.END_OBJECT; - return new DummyQuery(parseContext.isFilter()); + return new DummyQueryBuilder(); + } + + @Override + public DummyQueryBuilder getBuilderPrototype() { + return new DummyQueryBuilder(); } } diff --git a/core/src/test/java/org/elasticsearch/index/query/support/QueryInnerHitsTests.java b/core/src/test/java/org/elasticsearch/index/query/support/QueryInnerHitsTests.java new file mode 100644 index 00000000000..2c4e3171932 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/query/support/QueryInnerHitsTests.java @@ -0,0 +1,80 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.index.query.support; + +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.search.fetch.innerhits.InnerHitsBuilder; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; + +public class QueryInnerHitsTests extends ESTestCase { + + public void testSerialize() throws IOException { + copyAndAssert(new QueryInnerHits()); + copyAndAssert(new QueryInnerHits("foo", new InnerHitsBuilder.InnerHit())); + copyAndAssert(new QueryInnerHits("foo", null)); + copyAndAssert(new QueryInnerHits("foo", new InnerHitsBuilder.InnerHit().setSize(randomIntBetween(0, 100)))); + } + + public void testToXContent() throws IOException { + assertJson("{\"inner_hits\":{}}", new QueryInnerHits()); + assertJson("{\"inner_hits\":{\"name\":\"foo\"}}", new QueryInnerHits("foo", new InnerHitsBuilder.InnerHit())); + assertJson("{\"inner_hits\":{\"name\":\"bar\"}}", new QueryInnerHits("bar", null)); + assertJson("{\"inner_hits\":{\"name\":\"foo\",\"size\":42}}", new QueryInnerHits("foo", new 
InnerHitsBuilder.InnerHit().setSize(42))); + assertJson("{\"inner_hits\":{\"name\":\"boom\",\"from\":66,\"size\":666}}", new QueryInnerHits("boom", new InnerHitsBuilder.InnerHit().setFrom(66).setSize(666))); + } + + private void assertJson(String expected, QueryInnerHits hits) throws IOException { + QueryInnerHits queryInnerHits = copyAndAssert(hits); + String actual; + if (randomBoolean()) { + actual = oneLineJSON(queryInnerHits); + } else { + actual = oneLineJSON(hits); + } + assertEquals(expected, actual); + XContentParser parser = hits.getXcontentParser(); + assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); + QueryInnerHits other = copyAndAssert(new QueryInnerHits(parser)); + assertEquals(expected, oneLineJSON(other)); + } + + public QueryInnerHits copyAndAssert(QueryInnerHits hits) throws IOException { + BytesStreamOutput out = new BytesStreamOutput(); + hits.writeTo(out); + QueryInnerHits copy = randomBoolean() ? hits.readFrom(StreamInput.wrap(out.bytes())) : new QueryInnerHits(StreamInput.wrap(out.bytes())); + assertEquals(copy.toString() + " vs. " + hits.toString(), copy, hits); + return copy; + } + + private String oneLineJSON(QueryInnerHits hits) throws IOException { + XContentBuilder builder = XContentFactory.jsonBuilder(); + builder.startObject(); + hits.toXContent(builder, ToXContent.EMPTY_PARAMS); + builder.endObject(); + return builder.string().trim(); + } +} diff --git a/core/src/test/java/org/elasticsearch/index/search/MultiMatchQueryTests.java b/core/src/test/java/org/elasticsearch/index/search/MultiMatchQueryTests.java new file mode 100644 index 00000000000..831dc6c867a --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/search/MultiMatchQueryTests.java @@ -0,0 +1,85 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.search; + +import org.apache.lucene.index.Term; +import org.apache.lucene.search.*; +import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.index.Index; +import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.engine.Engine; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.query.IndexQueryParserService; +import org.elasticsearch.index.query.MultiMatchQueryBuilder; +import org.elasticsearch.index.query.QueryShardContext; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.junit.Before; + +import java.io.IOException; +import java.util.Arrays; + +import static org.elasticsearch.index.query.QueryBuilders.multiMatchQuery; + +public class MultiMatchQueryTests extends ESSingleNodeTestCase { + + private IndexQueryParserService queryParser; + private IndexService indexService; + + @Before + public void setup() throws IOException { + IndexService indexService = createIndex("test"); + MapperService mapperService = indexService.mapperService(); + String mapping = "{\n" + + " \"person\":{\n" + + " \"properties\":{\n" + + " \"name\":{\n" + + " \"properties\":{\n" + + " \"first\": {\n" + + " \"type\":\"string\"\n" + + " }," + + " \"last\": {\n" + + " \"type\":\"string\"\n" + + " }" + + " }" + + " }\n" + + " }\n" + + " }\n" + + "}"; + 
mapperService.merge("person", new CompressedXContent(mapping), true, false); + this.indexService = indexService; + queryParser = indexService.queryParserService(); + } + + public void testCrossFieldMultiMatchQuery() throws IOException { + QueryShardContext queryShardContext = new QueryShardContext(new Index("test"), queryParser); + queryShardContext.setAllowUnmappedFields(true); + Query parsedQuery = multiMatchQuery("banon").field("name.first", 2).field("name.last", 3).field("foobar").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS).toQuery(queryShardContext); + try (Engine.Searcher searcher = indexService.shardSafe(0).acquireSearcher("test")) { + Query rewrittenQuery = searcher.searcher().rewrite(parsedQuery); + + BooleanQuery.Builder expected = new BooleanQuery.Builder(); + expected.add(new TermQuery(new Term("foobar", "banon")), BooleanClause.Occur.SHOULD); + Query tq1 = new BoostQuery(new TermQuery(new Term("name.first", "banon")), 2); + Query tq2 = new BoostQuery(new TermQuery(new Term("name.last", "banon")), 3); + expected.add(new DisjunctionMaxQuery(Arrays.asList(tq1, tq2), 0f), BooleanClause.Occur.SHOULD); + assertEquals(expected.build(), rewrittenQuery); + } + } +} diff --git a/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index a65650c1b17..5176dc2934f 100644 --- a/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -90,10 +90,9 @@ import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_VERSION_CREATED; import static org.elasticsearch.common.settings.Settings.settingsBuilder; +import static org.elasticsearch.common.xcontent.ToXContent.EMPTY_PARAMS; import static 
org.elasticsearch.index.query.QueryBuilders.matchAllQuery; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*; import static org.hamcrest.Matchers.equalTo; /** @@ -690,7 +689,6 @@ public class IndexShardTests extends ESSingleNodeTestCase { assertTrue(postIndexWithExceptionCalled.get()); } - public void testMaybeFlush() throws Exception { createIndex("test", settingsBuilder().put(TranslogConfig.INDEX_TRANSLOG_DURABILITY, Translog.Durabilty.REQUEST).build()); ensureGreen(); @@ -713,11 +711,14 @@ public class IndexShardTests extends ESSingleNodeTestCase { assertEquals(0, shard.engine().getTranslog().totalOperations()); shard.engine().getTranslog().sync(); long size = shard.engine().getTranslog().sizeInBytes(); + logger.info("--> current translog size: [{}] num_ops [{}] generation [{}]", shard.engine().getTranslog().sizeInBytes(), shard.engine().getTranslog().totalOperations(), shard.engine().getTranslog().getGeneration()); client().admin().indices().prepareUpdateSettings("test").setSettings(settingsBuilder().put(IndexShard.INDEX_TRANSLOG_FLUSH_THRESHOLD_OPS, 1000) .put(IndexShard.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(size, ByteSizeUnit.BYTES)) .build()).get(); client().prepareDelete("test", "test", "2").get(); + logger.info("--> translog size after delete: [{}] num_ops [{}] generation [{}]", shard.engine().getTranslog().sizeInBytes(), shard.engine().getTranslog().totalOperations(), shard.engine().getTranslog().getGeneration()); assertBusy(() -> { // this is async + logger.info("--> translog size on iter : [{}] num_ops [{}] generation [{}]", shard.engine().getTranslog().sizeInBytes(), shard.engine().getTranslog().totalOperations(), 
shard.engine().getTranslog().getGeneration()); assertFalse(shard.shouldFlush()); }); assertEquals(0, shard.engine().getTranslog().totalOperations()); diff --git a/core/src/test/java/org/elasticsearch/indices/DateMathIndexExpressionsIntegrationIT.java b/core/src/test/java/org/elasticsearch/indices/DateMathIndexExpressionsIntegrationIT.java index 6868137dcda..792f14bce1e 100644 --- a/core/src/test/java/org/elasticsearch/indices/DateMathIndexExpressionsIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/indices/DateMathIndexExpressionsIntegrationIT.java @@ -23,12 +23,15 @@ import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.hamcrest.ElasticsearchAssertions; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import org.joda.time.format.DateTimeFormat; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; @@ -51,8 +54,8 @@ public class DateMathIndexExpressionsIntegrationIT extends ESIntegTestCase { refresh(); SearchResponse searchResponse = client().prepareSearch(dateMathExp1, dateMathExp2, dateMathExp3).get(); - ElasticsearchAssertions.assertHitCount(searchResponse, 3); - ElasticsearchAssertions.assertSearchHits(searchResponse, "1", "2", "3"); + assertHitCount(searchResponse, 3); + assertSearchHits(searchResponse, "1", "2", "3"); GetResponse getResponse = client().prepareGet(dateMathExp1, "type", "1").get(); assertThat(getResponse.isExists(), is(true)); @@ -84,4 +87,45 @@ public class 
DateMathIndexExpressionsIntegrationIT extends ESIntegTestCase { assertThat(deleteResponse.getId(), equalTo("3")); } + public void testAutoCreateIndexWithDateMathExpression() throws Exception { + DateTime now = new DateTime(DateTimeZone.UTC); + String index1 = ".marvel-" + DateTimeFormat.forPattern("YYYY.MM.dd").print(now); + String index2 = ".marvel-" + DateTimeFormat.forPattern("YYYY.MM.dd").print(now.minusDays(1)); + String index3 = ".marvel-" + DateTimeFormat.forPattern("YYYY.MM.dd").print(now.minusDays(2)); + + String dateMathExp1 = "<.marvel-{now/d}>"; + String dateMathExp2 = "<.marvel-{now/d-1d}>"; + String dateMathExp3 = "<.marvel-{now/d-2d}>"; + client().prepareIndex(dateMathExp1, "type", "1").setSource("{}").get(); + client().prepareIndex(dateMathExp2, "type", "2").setSource("{}").get(); + client().prepareIndex(dateMathExp3, "type", "3").setSource("{}").get(); + refresh(); + + SearchResponse searchResponse = client().prepareSearch(dateMathExp1, dateMathExp2, dateMathExp3).get(); + assertHitCount(searchResponse, 3); + assertSearchHits(searchResponse, "1", "2", "3"); + + IndicesStatsResponse indicesStatsResponse = client().admin().indices().prepareStats(dateMathExp1, dateMathExp2, dateMathExp3).get(); + assertThat(indicesStatsResponse.getIndex(index1), notNullValue()); + assertThat(indicesStatsResponse.getIndex(index2), notNullValue()); + assertThat(indicesStatsResponse.getIndex(index3), notNullValue()); + } + + public void testCreateIndexWithDateMathExpression() throws Exception { + DateTime now = new DateTime(DateTimeZone.UTC); + String index1 = ".marvel-" + DateTimeFormat.forPattern("YYYY.MM.dd").print(now); + String index2 = ".marvel-" + DateTimeFormat.forPattern("YYYY.MM.dd").print(now.minusDays(1)); + String index3 = ".marvel-" + DateTimeFormat.forPattern("YYYY.MM.dd").print(now.minusDays(2)); + + String dateMathExp1 = "<.marvel-{now/d}>"; + String dateMathExp2 = "<.marvel-{now/d-1d}>"; + String dateMathExp3 = "<.marvel-{now/d-2d}>"; + 
createIndex(dateMathExp1, dateMathExp2, dateMathExp3); + + ClusterState clusterState = client().admin().cluster().prepareState().get().getState(); + assertThat(clusterState.metaData().index(index1), notNullValue()); + assertThat(clusterState.metaData().index(index2), notNullValue()); + assertThat(clusterState.metaData().index(index3), notNullValue()); + } + } diff --git a/core/src/test/java/org/elasticsearch/indices/IndicesModuleTests.java b/core/src/test/java/org/elasticsearch/indices/IndicesModuleTests.java index d44d7c2211a..c7a2624d380 100644 --- a/core/src/test/java/org/elasticsearch/indices/IndicesModuleTests.java +++ b/core/src/test/java/org/elasticsearch/indices/IndicesModuleTests.java @@ -20,13 +20,9 @@ package org.elasticsearch.indices; import org.apache.lucene.analysis.hunspell.Dictionary; -import org.apache.lucene.search.Query; import org.elasticsearch.common.inject.ModuleTestCase; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.query.QueryParseContext; -import org.elasticsearch.index.query.QueryParser; -import org.elasticsearch.common.ParsingException; -import org.elasticsearch.index.query.TermQueryParser; +import org.elasticsearch.index.query.*; import java.io.IOException; import java.io.InputStream; @@ -39,8 +35,14 @@ public class IndicesModuleTests extends ModuleTestCase { public String[] names() { return new String[] {"fake-query-parser"}; } + @Override - public Query parse(QueryParseContext parseContext) throws IOException, ParsingException { + public QueryBuilder fromXContent(QueryParseContext parseContext) throws IOException { + return null; + } + + @Override + public QueryBuilder getBuilderPrototype() { return null; } } diff --git a/core/src/test/java/org/elasticsearch/indices/cache/query/terms/TermsLookupTests.java b/core/src/test/java/org/elasticsearch/indices/cache/query/terms/TermsLookupTests.java new file mode 100644 index 00000000000..6474547cf29 --- /dev/null +++ 
b/core/src/test/java/org/elasticsearch/indices/cache/query/terms/TermsLookupTests.java @@ -0,0 +1,85 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.indices.cache.query.terms; + +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.test.ESTestCase; +import org.junit.Test; + +import java.io.IOException; + +public class TermsLookupTests extends ESTestCase { + + @Test + public void testTermsLookup() { + String index = randomAsciiOfLengthBetween(1, 10); + String type = randomAsciiOfLengthBetween(1, 10); + String id = randomAsciiOfLengthBetween(1, 10); + String path = randomAsciiOfLengthBetween(1, 10); + String routing = randomAsciiOfLengthBetween(1, 10); + TermsLookup termsLookup = new TermsLookup(index, type, id, path); + termsLookup.routing(routing); + assertEquals(index, termsLookup.index()); + assertEquals(type, termsLookup.type()); + assertEquals(id, termsLookup.id()); + assertEquals(path, termsLookup.path()); + assertEquals(routing, termsLookup.routing()); + } + + @Test(expected=IllegalArgumentException.class) + public void testIllegalArguments() { + String type = randomAsciiOfLength(5); + String id = 
randomAsciiOfLength(5); + String path = randomAsciiOfLength(5); + switch (randomIntBetween(0, 2)) { + case 0: + type = null; break; + case 1: + id = null; break; + case 2: + path = null; break; + } + new TermsLookup(null, type, id, path); + } + + @Test + public void testSerialization() throws IOException { + TermsLookup termsLookup = randomTermsLookup(); + try (BytesStreamOutput output = new BytesStreamOutput()) { + termsLookup.writeTo(output); + try (StreamInput in = StreamInput.wrap(output.bytes())) { + TermsLookup deserializedLookup = TermsLookup.readTermsLookupFrom(in); + assertEquals(deserializedLookup, termsLookup); + assertEquals(deserializedLookup.hashCode(), termsLookup.hashCode()); + assertNotSame(deserializedLookup, termsLookup); + } + } + } + + public static TermsLookup randomTermsLookup() { + return new TermsLookup( + randomBoolean() ? randomAsciiOfLength(10) : null, + randomAsciiOfLength(10), + randomAsciiOfLength(10), + randomAsciiOfLength(10).replace('.', '_') + ).routing(randomBoolean() ? 
randomAsciiOfLength(10) : null); + } +} diff --git a/core/src/test/java/org/elasticsearch/indices/memory/IndexingMemoryControllerIT.java b/core/src/test/java/org/elasticsearch/indices/memory/IndexingMemoryControllerIT.java index 36ad646beaa..6c0f041b2b8 100644 --- a/core/src/test/java/org/elasticsearch/indices/memory/IndexingMemoryControllerIT.java +++ b/core/src/test/java/org/elasticsearch/indices/memory/IndexingMemoryControllerIT.java @@ -24,13 +24,14 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.index.engine.EngineConfig; -import org.elasticsearch.index.engine.SegmentsStats; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.node.internal.InternalSettingsPreparer; import org.elasticsearch.test.ESIntegTestCase; import org.junit.Test; +import java.util.concurrent.ExecutionException; + @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0) public class IndexingMemoryControllerIT extends ESIntegTestCase { @@ -77,7 +78,7 @@ public class IndexingMemoryControllerIT extends ESIntegTestCase { } @Test - public void testIndexBufferSizeUpdateInactiveShard() throws InterruptedException { + public void testIndexBufferSizeUpdateInactiveShard() throws InterruptedException, ExecutionException { createNode(Settings.builder().put(IndexingMemoryController.SHARD_INACTIVE_TIME_SETTING, "100ms").build()); @@ -86,6 +87,12 @@ public class IndexingMemoryControllerIT extends ESIntegTestCase { ensureGreen(); final IndexShard shard1 = internalCluster().getInstance(IndicesService.class).indexService("test1").shard(0); + + if (randomBoolean()) { + logger.info("--> indexing some pending operations"); + indexRandom(false, client().prepareIndex("test1", "type", "0").setSource("f", "0")); + } + boolean success = awaitBusy(() -> 
shard1.engine().config().getIndexingBufferSize().bytes() == EngineConfig.INACTIVE_SHARD_INDEXING_BUFFER.bytes()); if (!success) { fail("failed to update shard indexing buffer size due to inactive state. expected [" + EngineConfig.INACTIVE_SHARD_INDEXING_BUFFER + "] got [" + @@ -97,12 +104,15 @@ public class IndexingMemoryControllerIT extends ESIntegTestCase { success = awaitBusy(() -> shard1.engine().config().getIndexingBufferSize().bytes() > EngineConfig.INACTIVE_SHARD_INDEXING_BUFFER.bytes()); if (!success) { - fail("failed to update shard indexing buffer size due to inactive state. expected something larger then [" + EngineConfig.INACTIVE_SHARD_INDEXING_BUFFER + "] got [" + + fail("failed to update shard indexing buffer size due to active state. expected something larger then [" + EngineConfig.INACTIVE_SHARD_INDEXING_BUFFER + "] got [" + shard1.engine().config().getIndexingBufferSize().bytes() + "]" ); } - flush(); // clean translogs + if (randomBoolean()) { + logger.info("--> flushing translogs"); + flush(); // clean translogs + } success = awaitBusy(() -> shard1.engine().config().getIndexingBufferSize().bytes() == EngineConfig.INACTIVE_SHARD_INDEXING_BUFFER.bytes()); if (!success) { diff --git a/core/src/test/java/org/elasticsearch/nested/SimpleNestedIT.java b/core/src/test/java/org/elasticsearch/nested/SimpleNestedIT.java index 96db9bf7e2a..258c4adebfb 100644 --- a/core/src/test/java/org/elasticsearch/nested/SimpleNestedIT.java +++ b/core/src/test/java/org/elasticsearch/nested/SimpleNestedIT.java @@ -20,6 +20,7 @@ package org.elasticsearch.nested; import org.apache.lucene.search.Explanation; +import org.apache.lucene.search.join.ScoreMode; import org.elasticsearch.action.admin.cluster.health.ClusterHealthStatus; import org.elasticsearch.action.admin.cluster.stats.ClusterStatsResponse; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; @@ -311,7 +312,7 @@ public class SimpleNestedIT extends ESIntegTestCase { .execute().actionGet(); 
SearchResponse searchResponse = client().prepareSearch("test") - .setQuery(nestedQuery("nested1", termQuery("nested1.n_field1", "n_value1")).scoreMode("total")) + .setQuery(nestedQuery("nested1", termQuery("nested1.n_field1", "n_value1")).scoreMode(ScoreMode.Total)) .setExplain(true) .execute().actionGet(); assertNoFailures(searchResponse); diff --git a/core/src/test/java/org/elasticsearch/percolator/MultiPercolatorIT.java b/core/src/test/java/org/elasticsearch/percolator/MultiPercolatorIT.java index 0af7be6950a..2565d5d61e1 100644 --- a/core/src/test/java/org/elasticsearch/percolator/MultiPercolatorIT.java +++ b/core/src/test/java/org/elasticsearch/percolator/MultiPercolatorIT.java @@ -18,6 +18,7 @@ */ package org.elasticsearch.percolator; +import org.apache.lucene.search.join.ScoreMode; import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.percolate.MultiPercolateRequestBuilder; import org.elasticsearch.action.percolate.MultiPercolateResponse; @@ -26,6 +27,7 @@ import org.elasticsearch.client.Requests; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.query.MatchQueryBuilder; +import org.elasticsearch.index.query.Operator; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.test.ESIntegTestCase; import org.junit.Test; @@ -360,7 +362,7 @@ public class MultiPercolatorIT extends ESIntegTestCase { ensureGreen("nestedindex"); client().prepareIndex("nestedindex", PercolatorService.TYPE_NAME, "Q").setSource(jsonBuilder().startObject() - .field("query", QueryBuilders.nestedQuery("employee", QueryBuilders.matchQuery("employee.name", "virginia potts").operator(MatchQueryBuilder.Operator.AND)).scoreMode("avg")).endObject()).get(); + .field("query", QueryBuilders.nestedQuery("employee", QueryBuilders.matchQuery("employee.name", "virginia potts").operator(Operator.AND)).scoreMode(ScoreMode.Avg)).endObject()).get(); 
refresh(); diff --git a/core/src/test/java/org/elasticsearch/percolator/PercolatorBackwardsCompatibilityIT.java b/core/src/test/java/org/elasticsearch/percolator/PercolatorBackwardsCompatibilityIT.java index 09cda74fa89..f250e9231fa 100644 --- a/core/src/test/java/org/elasticsearch/percolator/PercolatorBackwardsCompatibilityIT.java +++ b/core/src/test/java/org/elasticsearch/percolator/PercolatorBackwardsCompatibilityIT.java @@ -23,8 +23,8 @@ import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.percolate.PercolateResponse; import org.elasticsearch.action.percolate.PercolateSourceBuilder; import org.elasticsearch.index.percolator.PercolatorException; -import org.elasticsearch.common.ParsingException; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.index.query.QueryShardException; import org.junit.Test; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; @@ -67,7 +67,7 @@ public class PercolatorBackwardsCompatibilityIT extends ESIntegTestCase { fail(); } catch (PercolatorException e) { e.printStackTrace(); - assertThat(e.getRootCause(), instanceOf(ParsingException.class)); + assertThat(e.getRootCause(), instanceOf(QueryShardException.class)); } } diff --git a/core/src/test/java/org/elasticsearch/percolator/PercolatorIT.java b/core/src/test/java/org/elasticsearch/percolator/PercolatorIT.java index 14071627799..5ddeeaf5fc2 100644 --- a/core/src/test/java/org/elasticsearch/percolator/PercolatorIT.java +++ b/core/src/test/java/org/elasticsearch/percolator/PercolatorIT.java @@ -18,6 +18,7 @@ */ package org.elasticsearch.percolator; +import org.apache.lucene.search.join.ScoreMode; import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; @@ -41,11 +42,11 @@ import org.elasticsearch.common.xcontent.XContentFactory; import 
org.elasticsearch.index.engine.DocumentMissingException; import org.elasticsearch.index.engine.VersionConflictEngineException; import org.elasticsearch.index.percolator.PercolatorException; -import org.elasticsearch.index.query.MatchQueryBuilder; +import org.elasticsearch.index.query.Operator; import org.elasticsearch.index.query.QueryBuilders; -import org.elasticsearch.common.ParsingException; +import org.elasticsearch.index.query.QueryShardException; import org.elasticsearch.index.query.functionscore.weight.WeightBuilder; -import org.elasticsearch.index.query.support.QueryInnerHitBuilder; +import org.elasticsearch.index.query.support.QueryInnerHits; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.script.Script; import org.elasticsearch.search.highlight.HighlightBuilder; @@ -1419,7 +1420,7 @@ public class PercolatorIT extends ESIntegTestCase { .setSize(5) .setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", "The quick brown fox jumps over the lazy dog").endObject())) .setHighlightBuilder(new HighlightBuilder().field("field1")) - .setPercolateQuery(functionScoreQuery(matchAllQuery()).add(new WeightBuilder().setWeight(5.5f))) + .setPercolateQuery(functionScoreQuery(new WeightBuilder().setWeight(5.5f))) .setScore(true) .execute().actionGet(); assertNoFailures(response); @@ -1451,7 +1452,7 @@ public class PercolatorIT extends ESIntegTestCase { .setSize(5) .setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", "The quick brown fox jumps over the lazy dog").endObject())) .setHighlightBuilder(new HighlightBuilder().field("field1")) - .setPercolateQuery(functionScoreQuery(matchAllQuery()).add(new WeightBuilder().setWeight(5.5f))) + .setPercolateQuery(functionScoreQuery(new WeightBuilder().setWeight(5.5f))) .setSortByScore(true) .execute().actionGet(); assertMatchCount(response, 5l); @@ -1483,7 +1484,7 @@ public class PercolatorIT extends ESIntegTestCase { .setSize(5) 
.setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", "The quick brown fox jumps over the lazy dog").endObject())) .setHighlightBuilder(new HighlightBuilder().field("field1").highlightQuery(QueryBuilders.matchQuery("field1", "jumps"))) - .setPercolateQuery(functionScoreQuery(matchAllQuery()).add(new WeightBuilder().setWeight(5.5f))) + .setPercolateQuery(functionScoreQuery(new WeightBuilder().setWeight(5.5f))) .setSortByScore(true) .execute().actionGet(); assertMatchCount(response, 5l); @@ -1520,7 +1521,7 @@ public class PercolatorIT extends ESIntegTestCase { .setSize(5) .setGetRequest(Requests.getRequest("test").type("type").id("1")) .setHighlightBuilder(new HighlightBuilder().field("field1")) - .setPercolateQuery(functionScoreQuery(matchAllQuery()).add(new WeightBuilder().setWeight(5.5f))) + .setPercolateQuery(functionScoreQuery(new WeightBuilder().setWeight(5.5f))) .setSortByScore(true) .execute().actionGet(); assertMatchCount(response, 5l); @@ -1737,7 +1738,7 @@ public class PercolatorIT extends ESIntegTestCase { .get(); fail(); } catch (PercolatorException e) { - assertThat(e.getRootCause(), instanceOf(ParsingException.class)); + assertThat(e.getRootCause(), instanceOf(QueryShardException.class)); } try { @@ -1746,7 +1747,7 @@ public class PercolatorIT extends ESIntegTestCase { .get(); fail(); } catch (PercolatorException e) { - assertThat(e.getRootCause(), instanceOf(ParsingException.class)); + assertThat(e.getRootCause(), instanceOf(QueryShardException.class)); } } @@ -1785,7 +1786,7 @@ public class PercolatorIT extends ESIntegTestCase { ensureGreen("nestedindex"); client().prepareIndex("nestedindex", PercolatorService.TYPE_NAME, "Q").setSource(jsonBuilder().startObject() - .field("query", QueryBuilders.nestedQuery("employee", QueryBuilders.matchQuery("employee.name", "virginia potts").operator(MatchQueryBuilder.Operator.AND)).scoreMode("avg")).endObject()).get(); + .field("query", QueryBuilders.nestedQuery("employee", 
QueryBuilders.matchQuery("employee.name", "virginia potts").operator(Operator.AND)).scoreMode(ScoreMode.Avg)).endObject()).get(); refresh(); @@ -1985,11 +1986,11 @@ public class PercolatorIT extends ESIntegTestCase { assertAcked(prepareCreate("index").addMapping("mapping", mapping)); try { client().prepareIndex("index", PercolatorService.TYPE_NAME, "1") - .setSource(jsonBuilder().startObject().field("query", nestedQuery("nested", matchQuery("nested.name", "value")).innerHit(new QueryInnerHitBuilder())).endObject()) + .setSource(jsonBuilder().startObject().field("query", nestedQuery("nested", matchQuery("nested.name", "value")).innerHit(new QueryInnerHits())).endObject()) .execute().actionGet(); fail("Expected a parse error, because inner_hits isn't supported in the percolate api"); } catch (Exception e) { - assertThat(e.getCause(), instanceOf(ParsingException.class)); + assertThat(e.getCause(), instanceOf(QueryShardException.class)); assertThat(e.getCause().getMessage(), containsString("inner_hits unsupported")); } } diff --git a/core/src/test/java/org/elasticsearch/rest/BytesRestResponseTests.java b/core/src/test/java/org/elasticsearch/rest/BytesRestResponseTests.java index 1ec4b5bea52..76d08eadc8d 100644 --- a/core/src/test/java/org/elasticsearch/rest/BytesRestResponseTests.java +++ b/core/src/test/java/org/elasticsearch/rest/BytesRestResponseTests.java @@ -23,8 +23,7 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.ShardSearchFailure; -import org.elasticsearch.index.Index; -import org.elasticsearch.index.query.TestParsingException; +import org.elasticsearch.common.ParsingException; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.rest.FakeRestRequest; @@ -34,10 +33,7 @@ import org.junit.Test; import java.io.FileNotFoundException; import 
java.io.IOException; -import static org.hamcrest.Matchers.contains; -import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.not; -import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.*; /** * @@ -142,17 +138,17 @@ public class BytesRestResponseTests extends ESTestCase { public void testConvert() throws IOException { RestRequest request = new FakeRestRequest(); RestChannel channel = new DetailedExceptionRestChannel(request); - ShardSearchFailure failure = new ShardSearchFailure(new TestParsingException(new Index("foo"), "foobar", null), + ShardSearchFailure failure = new ShardSearchFailure(new ParsingException(1, 2, "foobar", null), new SearchShardTarget("node_1", "foo", 1)); - ShardSearchFailure failure1 = new ShardSearchFailure(new TestParsingException(new Index("foo"), "foobar", null), + ShardSearchFailure failure1 = new ShardSearchFailure(new ParsingException(1, 2, "foobar", null), new SearchShardTarget("node_1", "foo", 2)); SearchPhaseExecutionException ex = new SearchPhaseExecutionException("search", "all shards failed", new ShardSearchFailure[] {failure, failure1}); BytesRestResponse response = new BytesRestResponse(channel, new RemoteTransportException("foo", ex)); String text = response.content().toUtf8(); - String expected = "{\"error\":{\"root_cause\":[{\"type\":\"test_parsing_exception\",\"reason\":\"foobar\",\"index\":\"foo\"}],\"type\":\"search_phase_execution_exception\",\"reason\":\"all shards failed\",\"phase\":\"search\",\"grouped\":true,\"failed_shards\":[{\"shard\":1,\"index\":\"foo\",\"node\":\"node_1\",\"reason\":{\"type\":\"test_parsing_exception\",\"reason\":\"foobar\",\"index\":\"foo\"}}]},\"status\":400}"; + String expected = "{\"error\":{\"root_cause\":[{\"type\":\"parsing_exception\",\"reason\":\"foobar\",\"line\":1,\"col\":2}],\"type\":\"search_phase_execution_exception\",\"reason\":\"all shards 
failed\",\"phase\":\"search\",\"grouped\":true,\"failed_shards\":[{\"shard\":1,\"index\":\"foo\",\"node\":\"node_1\",\"reason\":{\"type\":\"parsing_exception\",\"reason\":\"foobar\",\"line\":1,\"col\":2}}]},\"status\":400}"; assertEquals(expected.trim(), text.trim()); String stackTrace = ExceptionsHelper.stackTrace(ex); - assertTrue(stackTrace.contains("Caused by: [foo] TestParsingException[foobar]")); + assertTrue(stackTrace.contains("Caused by: ParsingException[foobar]")); } public static class WithHeadersException extends ElasticsearchException { diff --git a/core/src/test/java/org/elasticsearch/script/GroovyScriptIT.java b/core/src/test/java/org/elasticsearch/script/GroovyScriptIT.java index 6a35012c818..320b00036c9 100644 --- a/core/src/test/java/org/elasticsearch/script/GroovyScriptIT.java +++ b/core/src/test/java/org/elasticsearch/script/GroovyScriptIT.java @@ -104,9 +104,7 @@ public class GroovyScriptIT extends ESIntegTestCase { refresh(); // doc[] access - SearchResponse resp = client().prepareSearch("test").setQuery(functionScoreQuery(matchAllQuery()) -.add( - scriptFunction(new Script("doc['bar'].value", ScriptType.INLINE, "groovy", null))) + SearchResponse resp = client().prepareSearch("test").setQuery(functionScoreQuery(scriptFunction(new Script("doc['bar'].value", ScriptType.INLINE, "groovy", null))) .boostMode(CombineFunction.REPLACE)).get(); assertNoFailures(resp); @@ -120,8 +118,8 @@ public class GroovyScriptIT extends ESIntegTestCase { refresh(); // _score can be accessed - SearchResponse resp = client().prepareSearch("test").setQuery(functionScoreQuery(matchQuery("foo", "dog")) - .add(scriptFunction(new Script("_score", ScriptType.INLINE, "groovy", null))) + SearchResponse resp = client().prepareSearch("test").setQuery(functionScoreQuery(matchQuery("foo", "dog"), + scriptFunction(new Script("_score", ScriptType.INLINE, "groovy", null))) .boostMode(CombineFunction.REPLACE)).get(); assertNoFailures(resp); assertSearchHits(resp, "3", "1"); @@ -132,7 
+130,7 @@ public class GroovyScriptIT extends ESIntegTestCase { resp = client() .prepareSearch("test") .setQuery( - functionScoreQuery(matchQuery("foo", "dog")).add( + functionScoreQuery(matchQuery("foo", "dog"), scriptFunction(new Script("_score > 0.0 ? _score : 0", ScriptType.INLINE, "groovy", null))).boostMode( CombineFunction.REPLACE)).get(); assertNoFailures(resp); diff --git a/core/src/test/java/org/elasticsearch/search/MultiValueModeTests.java b/core/src/test/java/org/elasticsearch/search/MultiValueModeTests.java index 5bf1a0e9139..a84bc09b94d 100644 --- a/core/src/test/java/org/elasticsearch/search/MultiValueModeTests.java +++ b/core/src/test/java/org/elasticsearch/search/MultiValueModeTests.java @@ -24,6 +24,8 @@ import org.apache.lucene.index.*; import org.apache.lucene.util.BitDocIdSet; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.FixedBitSet; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.index.fielddata.FieldData; import org.elasticsearch.index.fielddata.NumericDoubleValues; import org.elasticsearch.index.fielddata.SortedBinaryDocValues; @@ -33,6 +35,8 @@ import org.elasticsearch.test.ESTestCase; import java.io.IOException; import java.util.Arrays; +import static org.hamcrest.Matchers.equalTo; + public class MultiValueModeTests extends ESTestCase { private static FixedBitSet randomRootDocs(int maxDoc) { @@ -733,4 +737,94 @@ public class MultiValueModeTests extends ESTestCase { } } } + + public void testValidOrdinals() { + assertThat(MultiValueMode.SUM.ordinal(), equalTo(0)); + assertThat(MultiValueMode.AVG.ordinal(), equalTo(1)); + assertThat(MultiValueMode.MEDIAN.ordinal(), equalTo(2)); + assertThat(MultiValueMode.MIN.ordinal(), equalTo(3)); + assertThat(MultiValueMode.MAX.ordinal(), equalTo(4)); + } + + public void testWriteTo() throws Exception { + try (BytesStreamOutput out = new BytesStreamOutput()) { + 
MultiValueMode.SUM.writeTo(out); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + assertThat(in.readVInt(), equalTo(0)); + } + } + + try (BytesStreamOutput out = new BytesStreamOutput()) { + MultiValueMode.AVG.writeTo(out); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + assertThat(in.readVInt(), equalTo(1)); + } + } + + try (BytesStreamOutput out = new BytesStreamOutput()) { + MultiValueMode.MEDIAN.writeTo(out); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + assertThat(in.readVInt(), equalTo(2)); + } + } + + try (BytesStreamOutput out = new BytesStreamOutput()) { + MultiValueMode.MIN.writeTo(out); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + assertThat(in.readVInt(), equalTo(3)); + } + } + + try (BytesStreamOutput out = new BytesStreamOutput()) { + MultiValueMode.MAX.writeTo(out); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + assertThat(in.readVInt(), equalTo(4)); + } + } + } + + public void testReadFrom() throws Exception { + try (BytesStreamOutput out = new BytesStreamOutput()) { + out.writeVInt(0); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + assertThat(MultiValueMode.readMultiValueModeFrom(in), equalTo(MultiValueMode.SUM)); + } + } + + try (BytesStreamOutput out = new BytesStreamOutput()) { + out.writeVInt(1); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + assertThat(MultiValueMode.readMultiValueModeFrom(in), equalTo(MultiValueMode.AVG)); + } + } + + try (BytesStreamOutput out = new BytesStreamOutput()) { + out.writeVInt(2); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + assertThat(MultiValueMode.readMultiValueModeFrom(in), equalTo(MultiValueMode.MEDIAN)); + } + } + + try (BytesStreamOutput out = new BytesStreamOutput()) { + out.writeVInt(3); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + assertThat(MultiValueMode.readMultiValueModeFrom(in), equalTo(MultiValueMode.MIN)); + } + } + + try (BytesStreamOutput out = new BytesStreamOutput()) { + 
out.writeVInt(4); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + assertThat(MultiValueMode.readMultiValueModeFrom(in), equalTo(MultiValueMode.MAX)); + } + } + } + + public void testFromString() { + assertThat(MultiValueMode.fromString("sum"), equalTo(MultiValueMode.SUM)); + assertThat(MultiValueMode.fromString("avg"), equalTo(MultiValueMode.AVG)); + assertThat(MultiValueMode.fromString("median"), equalTo(MultiValueMode.MEDIAN)); + assertThat(MultiValueMode.fromString("min"), equalTo(MultiValueMode.MIN)); + assertThat(MultiValueMode.fromString("max"), equalTo(MultiValueMode.MAX)); + } } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DoubleTermsIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DoubleTermsIT.java index 2bb98c2d59d..dbae3c19a87 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DoubleTermsIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DoubleTermsIT.java @@ -1201,8 +1201,7 @@ public class DoubleTermsIT extends AbstractTermsTestCase { .prepareSearch("idx") .setTypes("type") .setQuery( - functionScoreQuery(matchAllQuery()).add( - ScoreFunctionBuilders.scriptFunction(new Script("doc['" + SINGLE_VALUED_FIELD_NAME + "'].value")))) + functionScoreQuery(ScoreFunctionBuilders.scriptFunction(new Script("doc['" + SINGLE_VALUED_FIELD_NAME + "'].value")))) .addAggregation( terms("terms").collectMode(randomFrom(SubAggCollectionMode.values())).script( new Script("ceil(_score.doubleValue()/3)"))).execute().actionGet(); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoHashGridIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoHashGridIT.java index e2708b12314..e08b6d780bd 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoHashGridIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoHashGridIT.java @@ -212,7 +212,7 @@ public class 
GeoHashGridIT extends ESIntegTestCase { @Test public void filtered() throws Exception { GeoBoundingBoxQueryBuilder bbox = new GeoBoundingBoxQueryBuilder("location"); - bbox.topLeft(smallestGeoHash).bottomRight(smallestGeoHash).queryName("bbox"); + bbox.setCorners(smallestGeoHash, smallestGeoHash).queryName("bbox"); for (int precision = 1; precision <= XGeoHashUtils.PRECISION; precision++) { SearchResponse response = client().prepareSearch("idx") .addAggregation( diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java index 9660217031b..26cb3a968b0 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java @@ -29,7 +29,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.QueryBuilders; -import org.elasticsearch.common.ParsingException; +import org.elasticsearch.index.query.QueryShardException; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptModule; @@ -237,7 +237,7 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase { @Override public SignificanceHeuristic parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher, SearchContext context) - throws IOException, ParsingException { + throws IOException, QueryShardException { parser.nextToken(); return new SimpleHeuristic(); } @@ -621,4 +621,4 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase { } indexRandom(true, indexRequestBuilderList); } -} \ No newline at end of file +} diff --git 
a/core/src/test/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java b/core/src/test/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java index 369460203c7..a9b41ce4025 100644 --- a/core/src/test/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java @@ -52,6 +52,7 @@ import static org.elasticsearch.action.search.SearchType.DFS_QUERY_AND_FETCH; import static org.elasticsearch.action.search.SearchType.DFS_QUERY_THEN_FETCH; import static org.elasticsearch.action.search.SearchType.QUERY_AND_FETCH; import static org.elasticsearch.action.search.SearchType.QUERY_THEN_FETCH; + import static org.elasticsearch.client.Requests.createIndexRequest; import static org.elasticsearch.client.Requests.searchRequest; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; @@ -427,8 +428,8 @@ public class TransportTwoNodesSearchIT extends ESIntegTestCase { logger.info("Start Testing failed multi search with a wrong query"); MultiSearchResponse response = client().prepareMultiSearch() - // Add function score with a bogus score mode - .add(client().prepareSearch("test").setQuery(QueryBuilders.functionScoreQuery(QueryBuilders.termQuery("nid", 1)).scoreMode("foobar"))) + // Add geo distance range query against a field that doesn't exist (should be a geo point for the query to work) + .add(client().prepareSearch("test").setQuery(QueryBuilders.geoDistanceRangeQuery("non_existing_field", 1, 1).from(10).to(15))) .add(client().prepareSearch("test").setQuery(QueryBuilders.termQuery("nid", 2))) .add(client().prepareSearch("test").setQuery(QueryBuilders.matchAllQuery())) .execute().actionGet(); @@ -444,7 +445,6 @@ public class TransportTwoNodesSearchIT extends ESIntegTestCase { logger.info("Done Testing failed search"); } - @Test public void testFailedMultiSearchWithWrongQuery_withFunctionScore() throws Exception { prepareData(); @@ -453,7 
+453,7 @@ public class TransportTwoNodesSearchIT extends ESIntegTestCase { MultiSearchResponse response = client().prepareMultiSearch() // Add custom score query with bogus script - .add(client().prepareSearch("test").setQuery(QueryBuilders.functionScoreQuery(QueryBuilders.termQuery("nid", 1)).add(new ScriptScoreFunctionBuilder(new Script("foo", ScriptService.ScriptType.INLINE, "bar", null))))) + .add(client().prepareSearch("test").setQuery(QueryBuilders.functionScoreQuery(QueryBuilders.termQuery("nid", 1), new ScriptScoreFunctionBuilder(new Script("foo", ScriptService.ScriptType.INLINE, "bar", null))))) .add(client().prepareSearch("test").setQuery(QueryBuilders.termQuery("nid", 2))) .add(client().prepareSearch("test").setQuery(QueryBuilders.matchAllQuery())) .execute().actionGet(); diff --git a/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java b/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java index 43dd8158013..982a3d9a156 100644 --- a/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java @@ -27,9 +27,10 @@ import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchType; + import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.collect.HppcMaps; import org.elasticsearch.common.lucene.search.function.CombineFunction; +import org.elasticsearch.common.lucene.search.function.FiltersFunctionScoreQuery; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.cache.IndexCacheModule; @@ -37,6 +38,7 @@ import org.elasticsearch.index.mapper.MergeMappingException; import org.elasticsearch.index.query.HasChildQueryBuilder; import 
org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.script.Script; import org.elasticsearch.search.aggregations.AggregationBuilders; @@ -52,42 +54,15 @@ import org.hamcrest.Matchers; import org.junit.Test; import java.io.IOException; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Locale; -import java.util.Map; -import java.util.Set; +import java.util.*; import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.index.query.QueryBuilders.boolQuery; -import static org.elasticsearch.index.query.QueryBuilders.constantScoreQuery; -import static org.elasticsearch.index.query.QueryBuilders.hasParentQuery; -import static org.elasticsearch.index.query.QueryBuilders.idsQuery; -import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; -import static org.elasticsearch.index.query.QueryBuilders.matchQuery; -import static org.elasticsearch.index.query.QueryBuilders.multiMatchQuery; -import static org.elasticsearch.index.query.QueryBuilders.notQuery; -import static org.elasticsearch.index.query.QueryBuilders.prefixQuery; -import static org.elasticsearch.index.query.QueryBuilders.queryStringQuery; -import static org.elasticsearch.index.query.QueryBuilders.termQuery; -import static org.elasticsearch.index.query.QueryBuilders.termsQuery; +import static org.elasticsearch.index.query.QueryBuilders.*; import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.scriptFunction; import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.weightFactorFunction; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import 
static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHit; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasId; -import static org.hamcrest.Matchers.anyOf; -import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.greaterThanOrEqualTo; -import static org.hamcrest.Matchers.instanceOf; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.notNullValue; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*; +import static org.hamcrest.Matchers.*; /** * @@ -294,11 +269,11 @@ public class ChildQuerySearchIT extends ESIntegTestCase { for (int i = 1; i <= 10; i++) { logger.info("Round {}", i); SearchResponse searchResponse = client().prepareSearch("test") - .setQuery(constantScoreQuery(hasChildQuery("child", matchAllQuery()).scoreMode("max"))) + .setQuery(constantScoreQuery(hasChildQuery("child", matchAllQuery()).scoreMode(ScoreMode.Max))) .get(); assertNoFailures(searchResponse); searchResponse = client().prepareSearch("test") - .setQuery(constantScoreQuery(hasParentQuery("parent", matchAllQuery()).scoreMode("score"))) + .setQuery(constantScoreQuery(hasParentQuery("parent", matchAllQuery()).score(true))) .get(); assertNoFailures(searchResponse); } @@ -556,11 +531,11 @@ public class ChildQuerySearchIT extends ESIntegTestCase { client().prepareIndex("test", "child", "c1").setSource("c_field", "1").setParent(parentId).get(); refresh(); - CountResponse countResponse = client().prepareCount("test").setQuery(hasChildQuery("child", termQuery("c_field", "1")).scoreMode("max")) + CountResponse countResponse = client().prepareCount("test").setQuery(hasChildQuery("child", termQuery("c_field", "1")).scoreMode(ScoreMode.Max)) .get(); 
assertHitCount(countResponse, 1l); - countResponse = client().prepareCount("test").setQuery(hasParentQuery("parent", termQuery("p_field", "1")).scoreMode("score")) + countResponse = client().prepareCount("test").setQuery(hasParentQuery("parent", termQuery("p_field", "1")).score(true)) .get(); assertHitCount(countResponse, 1l); @@ -587,20 +562,20 @@ public class ChildQuerySearchIT extends ESIntegTestCase { SearchResponse searchResponse = client().prepareSearch("test") .setExplain(true) - .setQuery(hasChildQuery("child", termQuery("c_field", "1")).scoreMode("max")) + .setQuery(hasChildQuery("child", termQuery("c_field", "1")).scoreMode(ScoreMode.Max)) .get(); assertHitCount(searchResponse, 1l); assertThat(searchResponse.getHits().getAt(0).explanation().getDescription(), equalTo("Score based on join value p1")); searchResponse = client().prepareSearch("test") .setExplain(true) - .setQuery(hasParentQuery("parent", termQuery("p_field", "1")).scoreMode("score")) + .setQuery(hasParentQuery("parent", termQuery("p_field", "1")).score(true)) .get(); assertHitCount(searchResponse, 1l); assertThat(searchResponse.getHits().getAt(0).explanation().getDescription(), equalTo("Score based on join value p1")); ExplainResponse explainResponse = client().prepareExplain("test", "parent", parentId) - .setQuery(hasChildQuery("child", termQuery("c_field", "1")).scoreMode("max")) + .setQuery(hasChildQuery("child", termQuery("c_field", "1")).scoreMode(ScoreMode.Max)) .get(); assertThat(explainResponse.isExists(), equalTo(true)); assertThat(explainResponse.getExplanation().getDetails()[0].getDescription(), equalTo("Score based on join value p1")); @@ -678,7 +653,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { "child", QueryBuilders.functionScoreQuery(matchQuery("c_field2", 0), scriptFunction(new Script("doc['c_field1'].value"))) - .boostMode(CombineFunction.REPLACE.getName())).scoreMode("total")).get(); + .boostMode(CombineFunction.REPLACE)).scoreMode(ScoreMode.Total)).get(); 
assertThat(response.getHits().totalHits(), equalTo(3l)); assertThat(response.getHits().hits()[0].id(), equalTo("1")); @@ -695,7 +670,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { "child", QueryBuilders.functionScoreQuery(matchQuery("c_field2", 0), scriptFunction(new Script("doc['c_field1'].value"))) - .boostMode(CombineFunction.REPLACE.getName())).scoreMode("max")).get(); + .boostMode(CombineFunction.REPLACE)).scoreMode(ScoreMode.Max)).get(); assertThat(response.getHits().totalHits(), equalTo(3l)); assertThat(response.getHits().hits()[0].id(), equalTo("3")); @@ -712,7 +687,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { "child", QueryBuilders.functionScoreQuery(matchQuery("c_field2", 0), scriptFunction(new Script("doc['c_field1'].value"))) - .boostMode(CombineFunction.REPLACE.getName())).scoreMode("avg")).get(); + .boostMode(CombineFunction.REPLACE)).scoreMode(ScoreMode.Avg)).get(); assertThat(response.getHits().totalHits(), equalTo(3l)); assertThat(response.getHits().hits()[0].id(), equalTo("3")); @@ -729,7 +704,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { "parent", QueryBuilders.functionScoreQuery(matchQuery("p_field1", "p_value3"), scriptFunction(new Script("doc['p_field2'].value"))) - .boostMode(CombineFunction.REPLACE.getName())).scoreMode("score")) + .boostMode(CombineFunction.REPLACE)).score(true)) .addSort(SortBuilders.fieldSort("c_field3")).addSort(SortBuilders.scoreSort()).get(); assertThat(response.getHits().totalHits(), equalTo(7l)); @@ -769,7 +744,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { assertNoFailures(response); assertThat(response.getHits().totalHits(), equalTo(0l)); - response = client().prepareSearch("test").setQuery(QueryBuilders.hasChildQuery("child", matchQuery("text", "value")).scoreMode("max")) + response = client().prepareSearch("test").setQuery(QueryBuilders.hasChildQuery("child", matchQuery("text", "value")).scoreMode(ScoreMode.Max)) .get(); 
assertNoFailures(response); assertThat(response.getHits().totalHits(), equalTo(0l)); @@ -778,7 +753,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { assertNoFailures(response); assertThat(response.getHits().totalHits(), equalTo(0l)); - response = client().prepareSearch("test").setQuery(QueryBuilders.hasParentQuery("child", matchQuery("text", "value")).scoreMode("score")) + response = client().prepareSearch("test").setQuery(QueryBuilders.hasParentQuery("child", matchQuery("text", "value")).score(true)) .get(); assertNoFailures(response); assertThat(response.getHits().totalHits(), equalTo(0l)); @@ -865,7 +840,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { SearchType[] searchTypes = new SearchType[]{SearchType.QUERY_THEN_FETCH, SearchType.DFS_QUERY_THEN_FETCH}; for (SearchType searchType : searchTypes) { SearchResponse searchResponse = client().prepareSearch("test").setSearchType(searchType) - .setQuery(hasChildQuery("child", prefixQuery("c_field", "c")).scoreMode("max")).addSort("p_field", SortOrder.ASC) + .setQuery(hasChildQuery("child", prefixQuery("c_field", "c")).scoreMode(ScoreMode.Max)).addSort("p_field", SortOrder.ASC) .setSize(5).get(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().totalHits(), equalTo(10L)); @@ -876,7 +851,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { assertThat(searchResponse.getHits().hits()[4].id(), equalTo("p004")); searchResponse = client().prepareSearch("test").setSearchType(searchType) - .setQuery(hasParentQuery("parent", prefixQuery("p_field", "p")).scoreMode("score")).addSort("c_field", SortOrder.ASC) + .setQuery(hasParentQuery("parent", prefixQuery("p_field", "p")).score(true)).addSort("c_field", SortOrder.ASC) .setSize(5).get(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().totalHits(), equalTo(500L)); @@ -908,7 +883,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { refresh(); SearchResponse searchResponse = 
client().prepareSearch("test") - .setQuery(hasChildQuery("child", termQuery("c_field", "yellow")).scoreMode("total")).get(); + .setQuery(hasChildQuery("child", termQuery("c_field", "yellow")).scoreMode(ScoreMode.Total)).get(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().totalHits(), equalTo(1l)); assertThat(searchResponse.getHits().getAt(0).id(), equalTo("p1")); @@ -918,7 +893,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { .prepareSearch("test") .setQuery( boolQuery().must(matchQuery("c_field", "x")).must( - hasParentQuery("parent", termQuery("p_field", "p_value2")).scoreMode("score"))).get(); + hasParentQuery("parent", termQuery("p_field", "p_value2")).score(true))).get(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().totalHits(), equalTo(2l)); assertThat(searchResponse.getHits().getAt(0).id(), equalTo("c3")); @@ -933,7 +908,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { client().admin().indices().prepareRefresh("test").get(); } - searchResponse = client().prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "yellow")).scoreMode("total")) + searchResponse = client().prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "yellow")).scoreMode(ScoreMode.Total)) .get(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().totalHits(), equalTo(1l)); @@ -944,7 +919,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { .prepareSearch("test") .setQuery( boolQuery().must(matchQuery("c_field", "x")).must( - hasParentQuery("parent", termQuery("p_field", "p_value2")).scoreMode("score"))).get(); + hasParentQuery("parent", termQuery("p_field", "p_value2")).score(true))).get(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().totalHits(), equalTo(2l)); assertThat(searchResponse.getHits().getAt(0).id(), Matchers.anyOf(equalTo("c3"), equalTo("c4"))); @@ -969,7 +944,7 @@ public class ChildQuerySearchIT extends 
ESIntegTestCase { client().prepareIndex("test", "child", "c5").setSource("c_field", "x").setParent("p2").get(); refresh(); - SearchResponse searchResponse = client().prepareSearch("test").setQuery(hasChildQuery("child", matchAllQuery()).scoreMode("total")) + SearchResponse searchResponse = client().prepareSearch("test").setQuery(hasChildQuery("child", matchAllQuery()).scoreMode(ScoreMode.Total)) .setMinScore(3) // Score needs to be 3 or above! .get(); assertNoFailures(searchResponse); @@ -1238,7 +1213,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { client().prepareIndex("test", "child", "c3").setParent("p2").setSource("c_field", "red").get(); refresh(); - String scoreMode = ScoreMode.values()[getRandom().nextInt(ScoreMode.values().length)].name().toLowerCase(Locale.ROOT); + ScoreMode scoreMode = randomFrom(ScoreMode.values()); SearchResponse searchResponse = client().prepareSearch("test") .setQuery(boolQuery().must(QueryBuilders.hasChildQuery("child", termQuery("c_field", "blue")).scoreMode(scoreMode)).filter(notQuery(termQuery("p_field", "3")))) .get(); @@ -1264,13 +1239,13 @@ public class ChildQuerySearchIT extends ESIntegTestCase { client().prepareIndex("test", "child", "c1").setSource("c_field", "1").setParent(parentId).get(); refresh(); - SearchResponse searchResponse = client().prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "1")).scoreMode("max").queryName("test")) + SearchResponse searchResponse = client().prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "1")).scoreMode(ScoreMode.Max).queryName("test")) .get(); assertHitCount(searchResponse, 1l); assertThat(searchResponse.getHits().getAt(0).getMatchedQueries().length, equalTo(1)); assertThat(searchResponse.getHits().getAt(0).getMatchedQueries()[0], equalTo("test")); - searchResponse = client().prepareSearch("test").setQuery(hasParentQuery("parent", termQuery("p_field", "1")).scoreMode("score").queryName("test")) + searchResponse = 
client().prepareSearch("test").setQuery(hasParentQuery("parent", termQuery("p_field", "1")).score(true).queryName("test")) .get(); assertHitCount(searchResponse, 1l); assertThat(searchResponse.getHits().getAt(0).getMatchedQueries().length, equalTo(1)); @@ -1312,7 +1287,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { try { client().prepareSearch("test") - .setQuery(hasChildQuery("child", termQuery("c_field", "1")).scoreMode("max")) + .setQuery(hasChildQuery("child", termQuery("c_field", "1")).scoreMode(ScoreMode.Max)) .get(); fail(); } catch (SearchPhaseExecutionException e) { @@ -1330,7 +1305,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { try { client().prepareSearch("test") - .setQuery(hasParentQuery("parent", termQuery("p_field", "1")).scoreMode("score")) + .setQuery(hasParentQuery("parent", termQuery("p_field", "1")).score(true)) .get(); fail(); } catch (SearchPhaseExecutionException e) { @@ -1580,14 +1555,15 @@ public class ChildQuerySearchIT extends ESIntegTestCase { return indexBuilders; } - private SearchResponse minMaxQuery(String scoreMode, int minChildren, Integer maxChildren) throws SearchPhaseExecutionException { + private SearchResponse minMaxQuery(ScoreMode scoreMode, int minChildren, Integer maxChildren) throws SearchPhaseExecutionException { HasChildQueryBuilder hasChildQuery = hasChildQuery( "child", - QueryBuilders.functionScoreQuery(constantScoreQuery(QueryBuilders.termQuery("foo", "two"))).boostMode("replace").scoreMode("sum") - .add(QueryBuilders.matchAllQuery(), weightFactorFunction(1)) - .add(QueryBuilders.termQuery("foo", "three"), weightFactorFunction(1)) - .add(QueryBuilders.termQuery("foo", "four"), weightFactorFunction(1))).scoreMode(scoreMode) - .minChildren(minChildren); + QueryBuilders.functionScoreQuery(constantScoreQuery(QueryBuilders.termQuery("foo", "two")), + new FunctionScoreQueryBuilder.FilterFunctionBuilder[]{ + new FunctionScoreQueryBuilder.FilterFunctionBuilder(weightFactorFunction(1)), + new 
FunctionScoreQueryBuilder.FilterFunctionBuilder(QueryBuilders.termQuery("foo", "three"), weightFactorFunction(1)), + new FunctionScoreQueryBuilder.FilterFunctionBuilder(QueryBuilders.termQuery("foo", "four"), weightFactorFunction(1)) + }).boostMode(CombineFunction.REPLACE).scoreMode(FiltersFunctionScoreQuery.ScoreMode.SUM)).scoreMode(scoreMode).minChildren(minChildren); if (maxChildren != null) { hasChildQuery.maxChildren(maxChildren); @@ -1610,7 +1586,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { SearchResponse response; // Score mode = NONE - response = minMaxQuery("none", 0, null); + response = minMaxQuery(ScoreMode.None, 0, 0); assertThat(response.getHits().totalHits(), equalTo(3l)); assertThat(response.getHits().hits()[0].id(), equalTo("2")); @@ -1620,7 +1596,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { assertThat(response.getHits().hits()[2].id(), equalTo("4")); assertThat(response.getHits().hits()[2].score(), equalTo(1f)); - response = minMaxQuery("none", 1, null); + response = minMaxQuery(ScoreMode.None, 1, 0); assertThat(response.getHits().totalHits(), equalTo(3l)); assertThat(response.getHits().hits()[0].id(), equalTo("2")); @@ -1630,7 +1606,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { assertThat(response.getHits().hits()[2].id(), equalTo("4")); assertThat(response.getHits().hits()[2].score(), equalTo(1f)); - response = minMaxQuery("none", 2, null); + response = minMaxQuery(ScoreMode.None, 2, 0); assertThat(response.getHits().totalHits(), equalTo(2l)); assertThat(response.getHits().hits()[0].id(), equalTo("3")); @@ -1638,17 +1614,17 @@ public class ChildQuerySearchIT extends ESIntegTestCase { assertThat(response.getHits().hits()[1].id(), equalTo("4")); assertThat(response.getHits().hits()[1].score(), equalTo(1f)); - response = minMaxQuery("none", 3, null); + response = minMaxQuery(ScoreMode.None, 3, 0); assertThat(response.getHits().totalHits(), equalTo(1l)); 
assertThat(response.getHits().hits()[0].id(), equalTo("4")); assertThat(response.getHits().hits()[0].score(), equalTo(1f)); - response = minMaxQuery("none", 4, null); + response = minMaxQuery(ScoreMode.None, 4, 0); assertThat(response.getHits().totalHits(), equalTo(0l)); - response = minMaxQuery("none", 0, 4); + response = minMaxQuery(ScoreMode.None, 0, 4); assertThat(response.getHits().totalHits(), equalTo(3l)); assertThat(response.getHits().hits()[0].id(), equalTo("2")); @@ -1658,7 +1634,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { assertThat(response.getHits().hits()[2].id(), equalTo("4")); assertThat(response.getHits().hits()[2].score(), equalTo(1f)); - response = minMaxQuery("none", 0, 3); + response = minMaxQuery(ScoreMode.None, 0, 3); assertThat(response.getHits().totalHits(), equalTo(3l)); assertThat(response.getHits().hits()[0].id(), equalTo("2")); @@ -1668,7 +1644,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { assertThat(response.getHits().hits()[2].id(), equalTo("4")); assertThat(response.getHits().hits()[2].score(), equalTo(1f)); - response = minMaxQuery("none", 0, 2); + response = minMaxQuery(ScoreMode.None, 0, 2); assertThat(response.getHits().totalHits(), equalTo(2l)); assertThat(response.getHits().hits()[0].id(), equalTo("2")); @@ -1676,21 +1652,21 @@ public class ChildQuerySearchIT extends ESIntegTestCase { assertThat(response.getHits().hits()[1].id(), equalTo("3")); assertThat(response.getHits().hits()[1].score(), equalTo(1f)); - response = minMaxQuery("none", 2, 2); + response = minMaxQuery(ScoreMode.None, 2, 2); assertThat(response.getHits().totalHits(), equalTo(1l)); assertThat(response.getHits().hits()[0].id(), equalTo("3")); assertThat(response.getHits().hits()[0].score(), equalTo(1f)); try { - response = minMaxQuery("none", 3, 2); + response = minMaxQuery(ScoreMode.None, 3, 2); fail(); } catch (SearchPhaseExecutionException e) { assertThat(e.toString(), containsString("[has_child] 'max_children' is less than 
'min_children'")); } - // Score mode = TOTAL - response = minMaxQuery("total", 0, null); + // Score mode = SUM + response = minMaxQuery(ScoreMode.Total, 0, 0); assertThat(response.getHits().totalHits(), equalTo(3l)); assertThat(response.getHits().hits()[0].id(), equalTo("4")); @@ -1700,7 +1676,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { assertThat(response.getHits().hits()[2].id(), equalTo("2")); assertThat(response.getHits().hits()[2].score(), equalTo(1f)); - response = minMaxQuery("total", 1, null); + response = minMaxQuery(ScoreMode.Total, 1, 0); assertThat(response.getHits().totalHits(), equalTo(3l)); assertThat(response.getHits().hits()[0].id(), equalTo("4")); @@ -1710,7 +1686,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { assertThat(response.getHits().hits()[2].id(), equalTo("2")); assertThat(response.getHits().hits()[2].score(), equalTo(1f)); - response = minMaxQuery("total", 2, null); + response = minMaxQuery(ScoreMode.Total, 2, 0); assertThat(response.getHits().totalHits(), equalTo(2l)); assertThat(response.getHits().hits()[0].id(), equalTo("4")); @@ -1718,17 +1694,17 @@ public class ChildQuerySearchIT extends ESIntegTestCase { assertThat(response.getHits().hits()[1].id(), equalTo("3")); assertThat(response.getHits().hits()[1].score(), equalTo(3f)); - response = minMaxQuery("total", 3, null); + response = minMaxQuery(ScoreMode.Total, 3, 0); assertThat(response.getHits().totalHits(), equalTo(1l)); assertThat(response.getHits().hits()[0].id(), equalTo("4")); assertThat(response.getHits().hits()[0].score(), equalTo(6f)); - response = minMaxQuery("total", 4, null); + response = minMaxQuery(ScoreMode.Total, 4, 0); assertThat(response.getHits().totalHits(), equalTo(0l)); - response = minMaxQuery("total", 0, 4); + response = minMaxQuery(ScoreMode.Total, 0, 4); assertThat(response.getHits().totalHits(), equalTo(3l)); assertThat(response.getHits().hits()[0].id(), equalTo("4")); @@ -1738,7 +1714,7 @@ public class ChildQuerySearchIT 
extends ESIntegTestCase { assertThat(response.getHits().hits()[2].id(), equalTo("2")); assertThat(response.getHits().hits()[2].score(), equalTo(1f)); - response = minMaxQuery("total", 0, 3); + response = minMaxQuery(ScoreMode.Total, 0, 3); assertThat(response.getHits().totalHits(), equalTo(3l)); assertThat(response.getHits().hits()[0].id(), equalTo("4")); @@ -1748,7 +1724,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { assertThat(response.getHits().hits()[2].id(), equalTo("2")); assertThat(response.getHits().hits()[2].score(), equalTo(1f)); - response = minMaxQuery("total", 0, 2); + response = minMaxQuery(ScoreMode.Total, 0, 2); assertThat(response.getHits().totalHits(), equalTo(2l)); assertThat(response.getHits().hits()[0].id(), equalTo("3")); @@ -1756,21 +1732,21 @@ public class ChildQuerySearchIT extends ESIntegTestCase { assertThat(response.getHits().hits()[1].id(), equalTo("2")); assertThat(response.getHits().hits()[1].score(), equalTo(1f)); - response = minMaxQuery("total", 2, 2); + response = minMaxQuery(ScoreMode.Total, 2, 2); assertThat(response.getHits().totalHits(), equalTo(1l)); assertThat(response.getHits().hits()[0].id(), equalTo("3")); assertThat(response.getHits().hits()[0].score(), equalTo(3f)); try { - response = minMaxQuery("total", 3, 2); + response = minMaxQuery(ScoreMode.Total, 3, 2); fail(); } catch (SearchPhaseExecutionException e) { assertThat(e.toString(), containsString("[has_child] 'max_children' is less than 'min_children'")); } // Score mode = MAX - response = minMaxQuery("max", 0, null); + response = minMaxQuery(ScoreMode.Max, 0, 0); assertThat(response.getHits().totalHits(), equalTo(3l)); assertThat(response.getHits().hits()[0].id(), equalTo("4")); @@ -1780,7 +1756,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { assertThat(response.getHits().hits()[2].id(), equalTo("2")); assertThat(response.getHits().hits()[2].score(), equalTo(1f)); - response = minMaxQuery("max", 1, null); + response = 
minMaxQuery(ScoreMode.Max, 1, 0); assertThat(response.getHits().totalHits(), equalTo(3l)); assertThat(response.getHits().hits()[0].id(), equalTo("4")); @@ -1790,7 +1766,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { assertThat(response.getHits().hits()[2].id(), equalTo("2")); assertThat(response.getHits().hits()[2].score(), equalTo(1f)); - response = minMaxQuery("max", 2, null); + response = minMaxQuery(ScoreMode.Max, 2, 0); assertThat(response.getHits().totalHits(), equalTo(2l)); assertThat(response.getHits().hits()[0].id(), equalTo("4")); @@ -1798,17 +1774,17 @@ public class ChildQuerySearchIT extends ESIntegTestCase { assertThat(response.getHits().hits()[1].id(), equalTo("3")); assertThat(response.getHits().hits()[1].score(), equalTo(2f)); - response = minMaxQuery("max", 3, null); + response = minMaxQuery(ScoreMode.Max, 3, 0); assertThat(response.getHits().totalHits(), equalTo(1l)); assertThat(response.getHits().hits()[0].id(), equalTo("4")); assertThat(response.getHits().hits()[0].score(), equalTo(3f)); - response = minMaxQuery("max", 4, null); + response = minMaxQuery(ScoreMode.Max, 4, 0); assertThat(response.getHits().totalHits(), equalTo(0l)); - response = minMaxQuery("max", 0, 4); + response = minMaxQuery(ScoreMode.Max, 0, 4); assertThat(response.getHits().totalHits(), equalTo(3l)); assertThat(response.getHits().hits()[0].id(), equalTo("4")); @@ -1818,7 +1794,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { assertThat(response.getHits().hits()[2].id(), equalTo("2")); assertThat(response.getHits().hits()[2].score(), equalTo(1f)); - response = minMaxQuery("max", 0, 3); + response = minMaxQuery(ScoreMode.Max, 0, 3); assertThat(response.getHits().totalHits(), equalTo(3l)); assertThat(response.getHits().hits()[0].id(), equalTo("4")); @@ -1828,7 +1804,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { assertThat(response.getHits().hits()[2].id(), equalTo("2")); assertThat(response.getHits().hits()[2].score(), 
equalTo(1f)); - response = minMaxQuery("max", 0, 2); + response = minMaxQuery(ScoreMode.Max, 0, 2); assertThat(response.getHits().totalHits(), equalTo(2l)); assertThat(response.getHits().hits()[0].id(), equalTo("3")); @@ -1836,21 +1812,21 @@ public class ChildQuerySearchIT extends ESIntegTestCase { assertThat(response.getHits().hits()[1].id(), equalTo("2")); assertThat(response.getHits().hits()[1].score(), equalTo(1f)); - response = minMaxQuery("max", 2, 2); + response = minMaxQuery(ScoreMode.Max, 2, 2); assertThat(response.getHits().totalHits(), equalTo(1l)); assertThat(response.getHits().hits()[0].id(), equalTo("3")); assertThat(response.getHits().hits()[0].score(), equalTo(2f)); try { - response = minMaxQuery("max", 3, 2); + response = minMaxQuery(ScoreMode.Max, 3, 2); fail(); } catch (SearchPhaseExecutionException e) { assertThat(e.toString(), containsString("[has_child] 'max_children' is less than 'min_children'")); } // Score mode = AVG - response = minMaxQuery("avg", 0, null); + response = minMaxQuery(ScoreMode.Avg, 0, 0); assertThat(response.getHits().totalHits(), equalTo(3l)); assertThat(response.getHits().hits()[0].id(), equalTo("4")); @@ -1860,7 +1836,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { assertThat(response.getHits().hits()[2].id(), equalTo("2")); assertThat(response.getHits().hits()[2].score(), equalTo(1f)); - response = minMaxQuery("avg", 1, null); + response = minMaxQuery(ScoreMode.Avg, 1, 0); assertThat(response.getHits().totalHits(), equalTo(3l)); assertThat(response.getHits().hits()[0].id(), equalTo("4")); @@ -1870,7 +1846,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { assertThat(response.getHits().hits()[2].id(), equalTo("2")); assertThat(response.getHits().hits()[2].score(), equalTo(1f)); - response = minMaxQuery("avg", 2, null); + response = minMaxQuery(ScoreMode.Avg, 2, 0); assertThat(response.getHits().totalHits(), equalTo(2l)); assertThat(response.getHits().hits()[0].id(), equalTo("4")); @@ -1878,17 
+1854,17 @@ public class ChildQuerySearchIT extends ESIntegTestCase { assertThat(response.getHits().hits()[1].id(), equalTo("3")); assertThat(response.getHits().hits()[1].score(), equalTo(1.5f)); - response = minMaxQuery("avg", 3, null); + response = minMaxQuery(ScoreMode.Avg, 3, 0); assertThat(response.getHits().totalHits(), equalTo(1l)); assertThat(response.getHits().hits()[0].id(), equalTo("4")); assertThat(response.getHits().hits()[0].score(), equalTo(2f)); - response = minMaxQuery("avg", 4, null); + response = minMaxQuery(ScoreMode.Avg, 4, 0); assertThat(response.getHits().totalHits(), equalTo(0l)); - response = minMaxQuery("avg", 0, 4); + response = minMaxQuery(ScoreMode.Avg, 0, 4); assertThat(response.getHits().totalHits(), equalTo(3l)); assertThat(response.getHits().hits()[0].id(), equalTo("4")); @@ -1898,7 +1874,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { assertThat(response.getHits().hits()[2].id(), equalTo("2")); assertThat(response.getHits().hits()[2].score(), equalTo(1f)); - response = minMaxQuery("avg", 0, 3); + response = minMaxQuery(ScoreMode.Avg, 0, 3); assertThat(response.getHits().totalHits(), equalTo(3l)); assertThat(response.getHits().hits()[0].id(), equalTo("4")); @@ -1908,7 +1884,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { assertThat(response.getHits().hits()[2].id(), equalTo("2")); assertThat(response.getHits().hits()[2].score(), equalTo(1f)); - response = minMaxQuery("avg", 0, 2); + response = minMaxQuery(ScoreMode.Avg, 0, 2); assertThat(response.getHits().totalHits(), equalTo(2l)); assertThat(response.getHits().hits()[0].id(), equalTo("3")); @@ -1916,14 +1892,14 @@ public class ChildQuerySearchIT extends ESIntegTestCase { assertThat(response.getHits().hits()[1].id(), equalTo("2")); assertThat(response.getHits().hits()[1].score(), equalTo(1f)); - response = minMaxQuery("avg", 2, 2); + response = minMaxQuery(ScoreMode.Avg, 2, 2); assertThat(response.getHits().totalHits(), equalTo(1l)); 
assertThat(response.getHits().hits()[0].id(), equalTo("3")); assertThat(response.getHits().hits()[0].score(), equalTo(1.5f)); try { - response = minMaxQuery("avg", 3, 2); + response = minMaxQuery(ScoreMode.Avg, 3, 2); fail(); } catch (SearchPhaseExecutionException e) { assertThat(e.toString(), containsString("[has_child] 'max_children' is less than 'min_children'")); diff --git a/core/src/test/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java b/core/src/test/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java index e07bb73d6a6..6280f4af524 100644 --- a/core/src/test/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java +++ b/core/src/test/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java @@ -28,8 +28,10 @@ import org.elasticsearch.action.termvectors.TermVectorsRequest; import org.elasticsearch.action.termvectors.TermVectorsResponse; import org.elasticsearch.common.Priority; import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.termvectors.TermVectorsService; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.SearchHitField; import org.elasticsearch.search.SearchModule; @@ -166,7 +168,7 @@ public class FetchSubPhasePluginIT extends ESIntegTestCase { hitField = new InternalSearchHitField(NAMES[0], new ArrayList<>(1)); hitContext.hit().fields().put(NAMES[0], hitField); } - TermVectorsResponse termVector = context.indexShard().termVectorsService().getTermVectors(new TermVectorsRequest(context.indexShard().indexService().index().getName(), hitContext.hit().type(), hitContext.hit().id()), context.indexShard().indexService().index().getName()); + TermVectorsResponse termVector = context.indexShard().getTermVectors(new TermVectorsRequest(context.indexShard().indexService().index().getName(), hitContext.hit().type(), 
hitContext.hit().id())); try { Map tv = new HashMap<>(); TermsEnum terms = termVector.getFields().terms(field).iterator(); diff --git a/core/src/test/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java b/core/src/test/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java index e2859708aba..c2c2782caa6 100644 --- a/core/src/test/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java +++ b/core/src/test/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java @@ -26,12 +26,13 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchType; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.lucene.search.function.CombineFunction; +import org.elasticsearch.common.lucene.search.function.FiltersFunctionScoreQuery; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.index.query.MatchAllQueryBuilder; import org.elasticsearch.index.query.QueryBuilders; -import org.elasticsearch.index.query.functionscore.DecayFunctionBuilder; -import org.elasticsearch.index.query.functionscore.gauss.GaussDecayFunctionBuilder; +import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder; +import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder; +import org.elasticsearch.search.MultiValueMode; import org.elasticsearch.search.SearchHits; import org.elasticsearch.test.ESIntegTestCase; import org.joda.time.DateTime; @@ -41,8 +42,8 @@ import org.junit.Test; import java.io.IOException; import java.util.ArrayList; import java.util.List; -import java.util.concurrent.ExecutionException; import java.util.Locale; +import java.util.concurrent.ExecutionException; import static org.elasticsearch.client.Requests.indexRequest; import static org.elasticsearch.client.Requests.searchRequest; @@ -185,8 +186,8 @@ public class DecayFunctionScoreIT extends 
ESIntegTestCase { searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source( searchSource() .size(numDummyDocs + 2) - .query(functionScoreQuery(termQuery("test", "value"), gaussDecayFunction("num", 1.0, 5.0).setOffset(1.0)) - .boostMode(CombineFunction.REPLACE.getName())))); + .query(functionScoreQuery(termQuery("test", "value"), gaussDecayFunction("num", 1.0, 5.0, 1.0)) + .boostMode(CombineFunction.REPLACE)))); SearchResponse sr = response.actionGet(); SearchHits sh = sr.getHits(); assertThat(sh.getTotalHits(), equalTo((long) (numDummyDocs + 2))); @@ -204,8 +205,8 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { searchSource() .size(numDummyDocs + 2) .query(functionScoreQuery(termQuery("test", "value"), - exponentialDecayFunction("num", 1.0, 5.0).setOffset(1.0)).boostMode( - CombineFunction.REPLACE.getName())))); + exponentialDecayFunction("num", 1.0, 5.0, 1.0)).boostMode( + CombineFunction.REPLACE)))); sr = response.actionGet(); sh = sr.getHits(); assertThat(sh.getTotalHits(), equalTo((long) (numDummyDocs + 2))); @@ -220,8 +221,8 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source( searchSource() .size(numDummyDocs + 2) - .query(functionScoreQuery(termQuery("test", "value"), linearDecayFunction("num", 1.0, 20.0).setOffset(1.0)) - .boostMode(CombineFunction.REPLACE.getName())))); + .query(functionScoreQuery(termQuery("test", "value"), linearDecayFunction("num", 1.0, 20.0, 1.0)) + .boostMode(CombineFunction.REPLACE)))); sr = response.actionGet(); sh = sr.getHits(); assertThat(sh.getTotalHits(), equalTo((long) (numDummyDocs + 2))); @@ -264,7 +265,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source( searchSource().query( functionScoreQuery(termQuery("test", "value"), gaussDecayFunction("loc", lonlat, "1000km")).boostMode( - CombineFunction.MULT.getName())))); + CombineFunction.MULTIPLY)))); 
SearchResponse sr = response.actionGet(); SearchHits sh = sr.getHits(); assertThat(sh.getTotalHits(), equalTo((long) (2))); @@ -276,7 +277,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source( searchSource().query( functionScoreQuery(termQuery("test", "value"), gaussDecayFunction("loc", lonlat, "1000km")).boostMode( - CombineFunction.REPLACE.getName())))); + CombineFunction.REPLACE)))); sr = response.actionGet(); sh = sr.getHits(); assertThat(sh.getTotalHits(), equalTo((long) (2))); @@ -306,7 +307,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source( searchSource().query( functionScoreQuery(termQuery("test", "value"), gaussDecayFunction("loc", point, "1000km")).boostMode( - CombineFunction.MULT.getName())))); + CombineFunction.MULTIPLY)))); SearchResponse sr = response.actionGet(); SearchHits sh = sr.getHits(); assertThat(sh.getTotalHits(), equalTo((long) (1))); @@ -318,7 +319,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source( searchSource().query( functionScoreQuery(termQuery("test", "value"), gaussDecayFunction("loc", coords, "1000km")).boostMode( - CombineFunction.MULT.getName())))); + CombineFunction.MULTIPLY)))); sr = response.actionGet(); sh = sr.getHits(); assertThat(sh.getTotalHits(), equalTo((long) (1))); @@ -343,8 +344,8 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { ActionFuture response = client().search( searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source( searchSource().query( - functionScoreQuery(termQuery("test", "value"), gaussDecayFunction("num", 0.0, 1.0).setDecay(0.5)).boost( - 2.0f).boostMode(CombineFunction.MULT)))); + functionScoreQuery(termQuery("test", "value"), gaussDecayFunction("num", 0.0, 1.0, null, 0.5)).boost( + 2.0f).boostMode(CombineFunction.MULTIPLY)))); SearchResponse sr = 
response.actionGet(); SearchHits sh = sr.getHits(); assertThat(sh.getTotalHits(), equalTo((long) (1))); @@ -354,7 +355,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { response = client().search( searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source( searchSource().query( - functionScoreQuery(termQuery("test", "value"), gaussDecayFunction("num", 0.0, 1.0).setDecay(0.5)).boost( + functionScoreQuery(termQuery("test", "value"), gaussDecayFunction("num", 0.0, 1.0, null, 0.5)).boost( 2.0f).boostMode(CombineFunction.REPLACE)))); sr = response.actionGet(); sh = sr.getHits(); @@ -365,7 +366,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { response = client().search( searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source( searchSource().query( - functionScoreQuery(termQuery("test", "value"), gaussDecayFunction("num", 0.0, 1.0).setDecay(0.5)).boost( + functionScoreQuery(termQuery("test", "value"), gaussDecayFunction("num", 0.0, 1.0, null, 0.5)).boost( 2.0f).boostMode(CombineFunction.SUM)))); sr = response.actionGet(); sh = sr.getHits(); @@ -377,7 +378,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { response = client().search( searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source( searchSource().query( - functionScoreQuery(termQuery("test", "value"), gaussDecayFunction("num", 0.0, 1.0).setDecay(0.5)).boost( + functionScoreQuery(termQuery("test", "value"), gaussDecayFunction("num", 0.0, 1.0, null, 0.5)).boost( 2.0f).boostMode(CombineFunction.AVG)))); sr = response.actionGet(); sh = sr.getHits(); @@ -388,7 +389,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { response = client().search( searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source( searchSource().query( - functionScoreQuery(termQuery("test", "value"), gaussDecayFunction("num", 0.0, 1.0).setDecay(0.5)).boost( + functionScoreQuery(termQuery("test", "value"), gaussDecayFunction("num", 0.0, 1.0, null, 0.5)).boost( 
2.0f).boostMode(CombineFunction.MIN)))); sr = response.actionGet(); sh = sr.getHits(); @@ -399,7 +400,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { response = client().search( searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source( searchSource().query( - functionScoreQuery(termQuery("test", "value"), gaussDecayFunction("num", 0.0, 1.0).setDecay(0.5)).boost( + functionScoreQuery(termQuery("test", "value"), gaussDecayFunction("num", 0.0, 1.0, null, 0.5)).boost( 2.0f).boostMode(CombineFunction.MAX)))); sr = response.actionGet(); sh = sr.getHits(); @@ -467,11 +468,6 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { } - @Test(expected = IllegalStateException.class) - public void testExceptionThrownIfScaleRefNotBetween0And1() throws Exception { - DecayFunctionBuilder gfb = new GaussDecayFunctionBuilder("num1", "2013-05-28", "1d").setDecay(100); - } - @Test public void testValueMissingLin() throws Exception { @@ -504,8 +500,10 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { ActionFuture response = client().search( searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source( searchSource().query( - functionScoreQuery(constantScoreQuery(termQuery("test", "value"))).add(linearDecayFunction("num1", "2013-05-28", "+3d")) - .add(linearDecayFunction("num2", "0.0", "1")).scoreMode("multiply")))); + functionScoreQuery(constantScoreQuery(termQuery("test", "value")), new FunctionScoreQueryBuilder.FilterFunctionBuilder[]{ + new FunctionScoreQueryBuilder.FilterFunctionBuilder(linearDecayFunction("num1", "2013-05-28", "+3d")), + new FunctionScoreQueryBuilder.FilterFunctionBuilder(linearDecayFunction("num2", "0.0", "1")) + }).scoreMode(FiltersFunctionScoreQuery.ScoreMode.MULTIPLY)))); SearchResponse sr = response.actionGet(); @@ -552,9 +550,11 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { ActionFuture response = client().search( searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source( 
searchSource().query( - functionScoreQuery(QueryBuilders.matchAllQuery()).add(linearDecayFunction("num1", "1000w")) - .add(gaussDecayFunction("num1", "1d")).add(exponentialDecayFunction("num1", "1000w")) - .scoreMode("multiply")))); + functionScoreQuery(QueryBuilders.matchAllQuery(), new FunctionScoreQueryBuilder.FilterFunctionBuilder[]{ + new FunctionScoreQueryBuilder.FilterFunctionBuilder(linearDecayFunction("num1", null, "1000w")), + new FunctionScoreQueryBuilder.FilterFunctionBuilder(gaussDecayFunction("num1", null, "1d")), + new FunctionScoreQueryBuilder.FilterFunctionBuilder(exponentialDecayFunction("num1", null, "1000w")) + }).scoreMode(FiltersFunctionScoreQuery.ScoreMode.MULTIPLY)))); SearchResponse sr = response.actionGet(); assertNoFailures(sr); @@ -603,11 +603,11 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { ActionFuture response = client().search( searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source( searchSource().size(numDocs).query( - functionScoreQuery(termQuery("test", "value")) - .add(new MatchAllQueryBuilder(), linearDecayFunction("date", "2013-05-30", "+15d")) - .add(new MatchAllQueryBuilder(), linearDecayFunction("geo", lonlat, "1000km")) - .add(new MatchAllQueryBuilder(), linearDecayFunction("num", numDocs, numDocs / 2.0)) - .scoreMode("multiply").boostMode(CombineFunction.REPLACE.getName())))); + functionScoreQuery(termQuery("test", "value"), new FunctionScoreQueryBuilder.FilterFunctionBuilder[]{ + new FunctionScoreQueryBuilder.FilterFunctionBuilder(linearDecayFunction("date", "2013-05-30", "+15d")), + new FunctionScoreQueryBuilder.FilterFunctionBuilder(linearDecayFunction("geo", lonlat, "1000km")), + new FunctionScoreQueryBuilder.FilterFunctionBuilder(linearDecayFunction("num", numDocs, numDocs / 2.0)) + }).scoreMode(FiltersFunctionScoreQuery.ScoreMode.MULTIPLY).boostMode(CombineFunction.REPLACE)))); SearchResponse sr = response.actionGet(); assertNoFailures(sr); @@ -642,10 +642,9 @@ public class 
DecayFunctionScoreIT extends ESIntegTestCase { searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source( searchSource() .size(numDocs) - .query(functionScoreQuery(termQuery("test", "value")).add(new MatchAllQueryBuilder(), - linearDecayFunction("type1.geo", lonlat, "1000km")).scoreMode("multiply")))); + .query(functionScoreQuery(termQuery("test", "value"), linearDecayFunction("type1.geo", lonlat, "1000km")) + .scoreMode(FiltersFunctionScoreQuery.ScoreMode.MULTIPLY)))); SearchResponse sr = response.actionGet(); - } @Test(expected = SearchPhaseExecutionException.class) @@ -663,8 +662,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { ActionFuture response = client().search( searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source( searchSource().query( - functionScoreQuery(termQuery("test", "value")).add(new MatchAllQueryBuilder(), - linearDecayFunction("num", 1.0, 0.5)).scoreMode("multiply")))); + functionScoreQuery(termQuery("test", "value"), linearDecayFunction("num", 1.0, 0.5)).scoreMode(FiltersFunctionScoreQuery.ScoreMode.MULTIPLY)))); response.actionGet(); } @@ -683,8 +681,8 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { ActionFuture response = client().search( searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source( searchSource().query( - functionScoreQuery().add(new MatchAllQueryBuilder(), linearDecayFunction("num", 1, 0.5)).scoreMode( - "multiply")))); + functionScoreQuery(linearDecayFunction("num", 1, 0.5)).scoreMode( + FiltersFunctionScoreQuery.ScoreMode.MULTIPLY)))); response.actionGet(); } @@ -728,7 +726,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { response = client().search( searchRequest().source( searchSource().query( - functionScoreQuery(constantScoreQuery(termQuery("test", "value")), gaussDecayFunction("loc", lonlat, "1000km").setMultiValueMode("min"))))); + functionScoreQuery(constantScoreQuery(termQuery("test", "value")), gaussDecayFunction("loc", lonlat, 
"1000km").setMultiValueMode(MultiValueMode.MIN))))); sr = response.actionGet(); assertSearchHits(sr, "1", "2"); sh = sr.getHits(); @@ -738,7 +736,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { response = client().search( searchRequest().source( searchSource().query( - functionScoreQuery(constantScoreQuery(termQuery("test", "value")), gaussDecayFunction("loc", lonlat, "1000km").setMultiValueMode("max"))))); + functionScoreQuery(constantScoreQuery(termQuery("test", "value")), gaussDecayFunction("loc", lonlat, "1000km").setMultiValueMode(MultiValueMode.MAX))))); sr = response.actionGet(); assertSearchHits(sr, "1", "2"); sh = sr.getHits(); @@ -767,7 +765,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { response = client().search( searchRequest().source( searchSource().query( - functionScoreQuery(constantScoreQuery(termQuery("test", "value")), linearDecayFunction("num", "0", "10").setMultiValueMode("sum"))))); + functionScoreQuery(constantScoreQuery(termQuery("test", "value")), linearDecayFunction("num", "0", "10").setMultiValueMode(MultiValueMode.SUM))))); sr = response.actionGet(); assertSearchHits(sr, "1", "2"); sh = sr.getHits(); @@ -778,7 +776,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { response = client().search( searchRequest().source( searchSource().query( - functionScoreQuery(constantScoreQuery(termQuery("test", "value")), linearDecayFunction("num", "0", "10").setMultiValueMode("avg"))))); + functionScoreQuery(constantScoreQuery(termQuery("test", "value")), linearDecayFunction("num", "0", "10").setMultiValueMode(MultiValueMode.AVG))))); sr = response.actionGet(); assertSearchHits(sr, "1", "2"); sh = sr.getHits(); @@ -826,97 +824,6 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { } } - // issue https://github.com/elasticsearch/elasticsearch/issues/6292 - @Test - public void testMissingFunctionThrowsElasticsearchParseException() throws IOException { - - // example from issue 
https://github.com/elasticsearch/elasticsearch/issues/6292 - String doc = "{\n" + - " \"text\": \"baseball bats\"\n" + - "}\n"; - - String query = "{\n" + - " \"function_score\": {\n" + - " \"score_mode\": \"sum\",\n" + - " \"boost_mode\": \"replace\",\n" + - " \"functions\": [\n" + - " {\n" + - " \"filter\": {\n" + - " \"term\": {\n" + - " \"text\": \"baseball\"\n" + - " }\n" + - " }\n" + - " }\n" + - " ]\n" + - " }\n" + - "}\n"; - - client().prepareIndex("t", "test").setSource(doc).get(); - refresh(); - ensureYellow("t"); - try { - client().search( - searchRequest().source( - searchSource().query(query))).actionGet(); - fail("Should fail with SearchPhaseExecutionException"); - } catch (SearchPhaseExecutionException failure) { - assertThat(failure.toString(), containsString("SearchParseException")); - assertThat(failure.toString(), not(containsString("NullPointerException"))); - } - - query = "{\n" + - " \"function_score\": {\n" + - " \"score_mode\": \"sum\",\n" + - " \"boost_mode\": \"replace\",\n" + - " \"functions\": [\n" + - " {\n" + - " \"filter\": {\n" + - " \"term\": {\n" + - " \"text\": \"baseball\"\n" + - " }\n" + - " },\n" + - " \"weight\": 2\n" + - " },\n" + - " {\n" + - " \"filter\": {\n" + - " \"term\": {\n" + - " \"text\": \"baseball\"\n" + - " }\n" + - " }\n" + - " }\n" + - " ]\n" + - " }\n" + - "}"; - - try { - client().search( - searchRequest().source( - searchSource().query(query))).actionGet(); - fail("Should fail with SearchPhaseExecutionException"); - } catch (SearchPhaseExecutionException failure) { - assertThat(failure.toString(), containsString("SearchParseException")); - assertThat(failure.toString(), not(containsString("NullPointerException"))); - assertThat(failure.toString(), containsString("an entry in functions list is missing a function")); - } - - // next test java client - try { - client().prepareSearch("t").setQuery(QueryBuilders.functionScoreQuery(QueryBuilders.matchAllQuery(), null)).get(); - } catch (IllegalArgumentException 
failure) { - assertThat(failure.toString(), containsString("function must not be null")); - } - try { - client().prepareSearch("t").setQuery(QueryBuilders.functionScoreQuery().add(QueryBuilders.matchAllQuery(), null)).get(); - } catch (IllegalArgumentException failure) { - assertThat(failure.toString(), containsString("function must not be null")); - } - try { - client().prepareSearch("t").setQuery(QueryBuilders.functionScoreQuery().add(null)).get(); - } catch (IllegalArgumentException failure) { - assertThat(failure.toString(), containsString("function must not be null")); - } - } - @Test public void testExplainString() throws IOException, ExecutionException, InterruptedException { assertAcked(prepareCreate("test").addMapping( @@ -934,11 +841,11 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { SearchResponse response = client().search( searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source( searchSource().explain(true) - .query(functionScoreQuery(termQuery("test", "value")) - .add(gaussDecayFunction("num", 1.0, 5.0).setOffset(1.0)) - .add(linearDecayFunction("num", 1.0, 5.0).setOffset(1.0)) - .add(exponentialDecayFunction("num", 1.0, 5.0).setOffset(1.0)) - .boostMode(CombineFunction.REPLACE.getName())))).get(); + .query(functionScoreQuery(termQuery("test", "value"), new FunctionScoreQueryBuilder.FilterFunctionBuilder[]{ + new FunctionScoreQueryBuilder.FilterFunctionBuilder(gaussDecayFunction("num", 1.0, 5.0, 1.0)), + new FunctionScoreQueryBuilder.FilterFunctionBuilder(linearDecayFunction("num", 1.0, 5.0, 1.0)), + new FunctionScoreQueryBuilder.FilterFunctionBuilder(exponentialDecayFunction("num", 1.0, 5.0, 1.0)) + }).boostMode(CombineFunction.REPLACE)))).get(); String explanation = response.getHits().getAt(0).getExplanation().toString(); assertThat(explanation, containsString(" 1.0 = exp(-0.5*pow(MIN[Math.max(Math.abs(0.5(=doc value) - 1.0(=origin))) - 1.0(=offset), 0), Math.max(Math.abs(0.7(=doc value) - 1.0(=origin))) - 1.0(=offset), 
0)],2.0)/18.033688011112044)")); assertThat(explanation, containsString("1.0 = max(0.0, ((10.0 - MIN[Math.max(Math.abs(0.5(=doc value) - 1.0(=origin))) - 1.0(=offset), 0), Math.max(Math.abs(0.7(=doc value) - 1.0(=origin))) - 1.0(=offset), 0)])/10.0)")); diff --git a/core/src/test/java/org/elasticsearch/search/functionscore/ExplainableScriptIT.java b/core/src/test/java/org/elasticsearch/search/functionscore/ExplainableScriptIT.java index 1b942f660da..eb7903f665a 100644 --- a/core/src/test/java/org/elasticsearch/search/functionscore/ExplainableScriptIT.java +++ b/core/src/test/java/org/elasticsearch/search/functionscore/ExplainableScriptIT.java @@ -24,14 +24,10 @@ import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchType; import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.lucene.search.function.CombineFunction; import org.elasticsearch.index.fielddata.ScriptDocValues; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.script.AbstractDoubleSearchScript; -import org.elasticsearch.script.ExecutableScript; -import org.elasticsearch.script.ExplainableSearchScript; -import org.elasticsearch.script.NativeScriptFactory; -import org.elasticsearch.script.Script; +import org.elasticsearch.script.*; import org.elasticsearch.script.ScriptService.ScriptType; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; @@ -49,7 +45,6 @@ import java.util.Map; import java.util.concurrent.ExecutionException; import static org.elasticsearch.client.Requests.searchRequest; -import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.index.query.QueryBuilders.functionScoreQuery; import static org.elasticsearch.index.query.QueryBuilders.termQuery; 
@@ -79,9 +74,9 @@ public class ExplainableScriptIT extends ESIntegTestCase { ensureYellow(); SearchResponse response = client().search(searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source( searchSource().explain(true).query( - functionScoreQuery(termQuery("text", "text")).add( + functionScoreQuery(termQuery("text", "text"), scriptFunction(new Script("native_explainable_script", ScriptType.INLINE, "native", null))) - .boostMode("replace")))).actionGet(); + .boostMode(CombineFunction.REPLACE)))).actionGet(); ElasticsearchAssertions.assertNoFailures(response); SearchHits hits = response.getHits(); diff --git a/core/src/test/java/org/elasticsearch/search/functionscore/FunctionScoreBackwardCompatibilityIT.java b/core/src/test/java/org/elasticsearch/search/functionscore/FunctionScoreBackwardCompatibilityIT.java index 80c4968fda8..8bf957d283e 100644 --- a/core/src/test/java/org/elasticsearch/search/functionscore/FunctionScoreBackwardCompatibilityIT.java +++ b/core/src/test/java/org/elasticsearch/search/functionscore/FunctionScoreBackwardCompatibilityIT.java @@ -22,6 +22,7 @@ import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder; import org.elasticsearch.script.Script; import org.elasticsearch.test.ESBackcompatTestCase; import org.junit.Test; @@ -110,11 +111,12 @@ public class FunctionScoreBackwardCompatibilityIT extends ESBackcompatTestCase { SearchResponse response = client().search( searchRequest().source( searchSource().query( - functionScoreQuery(termQuery("text", "value")) - .add(gaussDecayFunction("loc", new GeoPoint(10, 20), "1000km")) - .add(scriptFunction(new Script("_index['text']['value'].tf()"))) - .add(termQuery("text", "boosted"), weightFactorFunction(5)) - ))).actionGet(); + functionScoreQuery(termQuery("text", 
"value"), new FunctionScoreQueryBuilder.FilterFunctionBuilder[] { + new FunctionScoreQueryBuilder.FilterFunctionBuilder(gaussDecayFunction("loc", new GeoPoint(10, 20), "1000km")), + new FunctionScoreQueryBuilder.FilterFunctionBuilder(scriptFunction(new Script("_index['text']['value'].tf()"))), + new FunctionScoreQueryBuilder.FilterFunctionBuilder(termQuery("text", "boosted"), weightFactorFunction(5)) + } + )))).actionGet(); assertSearchResponse(response); assertOrderedSearchHits(response, ids); } diff --git a/core/src/test/java/org/elasticsearch/search/functionscore/FunctionScoreFieldValueIT.java b/core/src/test/java/org/elasticsearch/search/functionscore/FunctionScoreFieldValueIT.java index 6f6a7195007..419861d2c77 100644 --- a/core/src/test/java/org/elasticsearch/search/functionscore/FunctionScoreFieldValueIT.java +++ b/core/src/test/java/org/elasticsearch/search/functionscore/FunctionScoreFieldValueIT.java @@ -21,7 +21,6 @@ package org.elasticsearch.search.functionscore; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.lucene.search.function.FieldValueFactorFunction; import org.elasticsearch.test.ESIntegTestCase; import org.junit.Test; @@ -126,34 +125,5 @@ public class FunctionScoreFieldValueIT extends ESIntegTestCase { // This is fine, the query will throw an exception if executed // locally, instead of just having failures } - - // don't permit an array of factors - try { - String querySource = "{" + - "\"query\": {" + - " \"function_score\": {" + - " \"query\": {" + - " \"match\": {\"name\": \"foo\"}" + - " }," + - " \"functions\": [" + - " {" + - " \"field_value_factor\": {" + - " \"field\": \"test\"," + - " \"factor\": [1.2,2]" + - " }" + - " }" + - " ]" + - " }" + - " }" + - "}"; - response = client().prepareSearch("test") - .setSource(new BytesArray(querySource)) - .get(); - 
assertFailures(response); - } catch (SearchPhaseExecutionException e) { - // This is fine, the query will throw an exception if executed - // locally, instead of just having failures - } - } } diff --git a/core/src/test/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java b/core/src/test/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java index d86e591afcb..4d78de1386c 100644 --- a/core/src/test/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java +++ b/core/src/test/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java @@ -25,8 +25,11 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchType; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.geo.GeoPoint; +import org.elasticsearch.common.lucene.search.function.CombineFunction; import org.elasticsearch.common.lucene.search.function.FieldValueFactorFunction; +import org.elasticsearch.common.lucene.search.function.FiltersFunctionScoreQuery; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.query.MatchAllQueryBuilder; import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder; import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder; import org.elasticsearch.index.query.functionscore.weight.WeightBuilder; @@ -90,21 +93,25 @@ public class FunctionScoreIT extends ESIntegTestCase { SearchResponse response = client().search( searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source( searchSource().explain(true).query( - functionScoreQuery(termQuery("test", "value")).add(gaussDecayFunction("num", 5, 5)).add(exponentialDecayFunction("num", 5, 5)).add(linearDecayFunction("num", 5, 5))))).get(); + functionScoreQuery(termQuery("test", "value"), new FunctionScoreQueryBuilder.FilterFunctionBuilder[]{ + new FunctionScoreQueryBuilder.FilterFunctionBuilder(gaussDecayFunction("num", 5, 5)), + new 
FunctionScoreQueryBuilder.FilterFunctionBuilder(exponentialDecayFunction("num", 5, 5)), + new FunctionScoreQueryBuilder.FilterFunctionBuilder(linearDecayFunction("num", 5, 5)) + })))).get(); String explanation = response.getHits().getAt(0).explanation().toString(); checkQueryExplanationAppearsOnlyOnce(explanation); response = client().search( searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source( searchSource().explain(true).query( - functionScoreQuery(termQuery("test", "value")).add(fieldValueFactorFunction("num"))))).get(); + functionScoreQuery(termQuery("test", "value"), fieldValueFactorFunction("num"))))).get(); explanation = response.getHits().getAt(0).explanation().toString(); checkQueryExplanationAppearsOnlyOnce(explanation); response = client().search( searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source( searchSource().explain(true).query( - functionScoreQuery(termQuery("test", "value")).add(randomFunction(10))))).get(); + functionScoreQuery(termQuery("test", "value"), randomFunction(10))))).get(); explanation = response.getHits().getAt(0).explanation().toString(); checkQueryExplanationAppearsOnlyOnce(explanation); @@ -171,11 +178,11 @@ public class FunctionScoreIT extends ESIntegTestCase { SearchResponse responseWithWeights = client().search( searchRequest().source( searchSource().query( - functionScoreQuery(constantScoreQuery(termQuery(TEXT_FIELD, "value"))) - .add(gaussDecayFunction(GEO_POINT_FIELD, new GeoPoint(10, 20), "1000km")) - .add(fieldValueFactorFunction(DOUBLE_FIELD).modifier(FieldValueFactorFunction.Modifier.LN).setWeight(2)) - .add(scriptFunction(new Script("_index['" + TEXT_FIELD + "']['value'].tf()")).setWeight(3))) - .explain(true))).actionGet(); + functionScoreQuery(constantScoreQuery(termQuery(TEXT_FIELD, "value")), new FunctionScoreQueryBuilder.FilterFunctionBuilder[]{ + new FunctionScoreQueryBuilder.FilterFunctionBuilder(gaussDecayFunction(GEO_POINT_FIELD, new GeoPoint(10, 20), "1000km")), + new 
FunctionScoreQueryBuilder.FilterFunctionBuilder(fieldValueFactorFunction(DOUBLE_FIELD).modifier(FieldValueFactorFunction.Modifier.LN).setWeight(2)), + new FunctionScoreQueryBuilder.FilterFunctionBuilder(scriptFunction(new Script("_index['" + TEXT_FIELD + "']['value'].tf()")).setWeight(3)) + })).explain(true))).actionGet(); assertThat( responseWithWeights.getHits().getAt(0).getExplanation().toString(), @@ -183,7 +190,7 @@ public class FunctionScoreIT extends ESIntegTestCase { responseWithWeights = client().search( searchRequest().source( searchSource().query( - functionScoreQuery(constantScoreQuery(termQuery(TEXT_FIELD, "value"))).add(weightFactorFunction(4.0f))) + functionScoreQuery(constantScoreQuery(termQuery(TEXT_FIELD, "value")), weightFactorFunction(4.0f))) .explain(true))).actionGet(); assertThat( responseWithWeights.getHits().getAt(0).getExplanation().toString(), @@ -203,19 +210,19 @@ public class FunctionScoreIT extends ESIntegTestCase { SearchResponse response = client().search( searchRequest().source( searchSource().query( - functionScoreQuery(constantScoreQuery(termQuery(TEXT_FIELD, "value"))) - .add(gaussDecayFunction(GEO_POINT_FIELD, new GeoPoint(10, 20), "1000km")) - .add(fieldValueFactorFunction(DOUBLE_FIELD).modifier(FieldValueFactorFunction.Modifier.LN)) - .add(scriptFunction(new Script("_index['" + TEXT_FIELD + "']['value'].tf()")))))).actionGet(); + functionScoreQuery(constantScoreQuery(termQuery(TEXT_FIELD, "value")), new FunctionScoreQueryBuilder.FilterFunctionBuilder[]{ + new FunctionScoreQueryBuilder.FilterFunctionBuilder(gaussDecayFunction(GEO_POINT_FIELD, new GeoPoint(10, 20), "1000km")), + new FunctionScoreQueryBuilder.FilterFunctionBuilder(fieldValueFactorFunction(DOUBLE_FIELD).modifier(FieldValueFactorFunction.Modifier.LN)), + new FunctionScoreQueryBuilder.FilterFunctionBuilder(scriptFunction(new Script("_index['" + TEXT_FIELD + "']['value'].tf()"))) + })))).actionGet(); SearchResponse responseWithWeights = client().search( 
searchRequest().source( searchSource().query( - functionScoreQuery(constantScoreQuery(termQuery(TEXT_FIELD, "value"))) - .add(gaussDecayFunction(GEO_POINT_FIELD, new GeoPoint(10, 20), "1000km").setWeight(2)) - .add(fieldValueFactorFunction(DOUBLE_FIELD).modifier(FieldValueFactorFunction.Modifier.LN) - .setWeight(2)) - .add(scriptFunction(new Script("_index['" + TEXT_FIELD + "']['value'].tf()")).setWeight(2))))) - .actionGet(); + functionScoreQuery(constantScoreQuery(termQuery(TEXT_FIELD, "value")), new FunctionScoreQueryBuilder.FilterFunctionBuilder[]{ + new FunctionScoreQueryBuilder.FilterFunctionBuilder(gaussDecayFunction(GEO_POINT_FIELD, new GeoPoint(10, 20), "1000km").setWeight(2)), + new FunctionScoreQueryBuilder.FilterFunctionBuilder(fieldValueFactorFunction(DOUBLE_FIELD).modifier(FieldValueFactorFunction.Modifier.LN).setWeight(2)), + new FunctionScoreQueryBuilder.FilterFunctionBuilder(scriptFunction(new Script("_index['" + TEXT_FIELD + "']['value'].tf()")).setWeight(2)) + })))).actionGet(); assertSearchResponse(response); assertThat(response.getHits().getAt(0).getScore(), is(1.0f)); @@ -235,14 +242,16 @@ public class FunctionScoreIT extends ESIntegTestCase { ScoreFunctionBuilder[] scoreFunctionBuilders = getScoreFunctionBuilders(); float[] weights = createRandomWeights(scoreFunctionBuilders.length); float[] scores = getScores(scoreFunctionBuilders); - - String scoreMode = getRandomScoreMode(); - FunctionScoreQueryBuilder withWeights = functionScoreQuery(constantScoreQuery(termQuery(TEXT_FIELD, "value"))).scoreMode(scoreMode); int weightscounter = 0; + FunctionScoreQueryBuilder.FilterFunctionBuilder[] filterFunctionBuilders = new FunctionScoreQueryBuilder.FilterFunctionBuilder[scoreFunctionBuilders.length]; for (ScoreFunctionBuilder builder : scoreFunctionBuilders) { - withWeights.add(builder.setWeight(weights[weightscounter])); + filterFunctionBuilders[weightscounter] = new 
FunctionScoreQueryBuilder.FilterFunctionBuilder(builder.setWeight(weights[weightscounter])); weightscounter++; } + FiltersFunctionScoreQuery.ScoreMode scoreMode = randomFrom(FiltersFunctionScoreQuery.ScoreMode.AVG, FiltersFunctionScoreQuery.ScoreMode.SUM, + FiltersFunctionScoreQuery.ScoreMode.MIN, FiltersFunctionScoreQuery.ScoreMode.MAX, FiltersFunctionScoreQuery.ScoreMode.MULTIPLY); + FunctionScoreQueryBuilder withWeights = functionScoreQuery(constantScoreQuery(termQuery(TEXT_FIELD, "value")), filterFunctionBuilders).scoreMode(scoreMode); + SearchResponse responseWithWeights = client().search( searchRequest().source(searchSource().query(withWeights)) ).actionGet(); @@ -251,38 +260,48 @@ public class FunctionScoreIT extends ESIntegTestCase { assertThat((float) expectedScore / responseWithWeights.getHits().getAt(0).getScore(), is(1.0f)); } - protected double computeExpectedScore(float[] weights, float[] scores, String scoreMode) { - double expectedScore = 0.0; - if ("multiply".equals(scoreMode)) { - expectedScore = 1.0; - } - if ("max".equals(scoreMode)) { - expectedScore = Float.MAX_VALUE * -1.0; - } - if ("min".equals(scoreMode)) { - expectedScore = Float.MAX_VALUE; + protected double computeExpectedScore(float[] weights, float[] scores, FiltersFunctionScoreQuery.ScoreMode scoreMode) { + double expectedScore; + switch(scoreMode) { + case MULTIPLY: + expectedScore = 1.0; + break; + case MAX: + expectedScore = Float.MAX_VALUE * -1.0; + break; + case MIN: + expectedScore = Float.MAX_VALUE; + break; + default: + expectedScore = 0.0; + break; } float weightSum = 0; - for (int i = 0; i < weights.length; i++) { double functionScore = (double) weights[i] * scores[i]; weightSum += weights[i]; - - if ("avg".equals(scoreMode)) { - expectedScore += functionScore; - } else if ("max".equals(scoreMode)) { - expectedScore = Math.max(functionScore, expectedScore); - } else if ("min".equals(scoreMode)) { - expectedScore = Math.min(functionScore, expectedScore); - } else if 
("sum".equals(scoreMode)) { - expectedScore += functionScore; - } else if ("multiply".equals(scoreMode)) { - expectedScore *= functionScore; + switch(scoreMode) { + case AVG: + expectedScore += functionScore; + break; + case MAX: + expectedScore = Math.max(functionScore, expectedScore); + break; + case MIN: + expectedScore = Math.min(functionScore, expectedScore); + break; + case SUM: + expectedScore += functionScore; + break; + case MULTIPLY: + expectedScore *= functionScore; + break; + default: + throw new UnsupportedOperationException(); } - } - if ("avg".equals(scoreMode)) { + if (scoreMode == FiltersFunctionScoreQuery.ScoreMode.AVG) { expectedScore /= weightSum; } return expectedScore; @@ -309,8 +328,7 @@ public class FunctionScoreIT extends ESIntegTestCase { ScoreFunctionBuilder scoreFunctionBuilder = scoreFunctionBuilders[randomInt(3)]; float[] weights = createRandomWeights(1); float[] scores = getScores(scoreFunctionBuilder); - FunctionScoreQueryBuilder withWeights = functionScoreQuery(constantScoreQuery(termQuery(TEXT_FIELD, "value"))); - withWeights.add(scoreFunctionBuilder.setWeight(weights[0])); + FunctionScoreQueryBuilder withWeights = functionScoreQuery(constantScoreQuery(termQuery(TEXT_FIELD, "value")), scoreFunctionBuilder.setWeight(weights[0])); SearchResponse responseWithWeights = client().search( searchRequest().source(searchSource().query(withWeights)) @@ -320,11 +338,6 @@ public class FunctionScoreIT extends ESIntegTestCase { } - private String getRandomScoreMode() { - String[] scoreModes = {"avg", "sum", "min", "max", "multiply"}; - return scoreModes[randomInt(scoreModes.length - 1)]; - } - private float[] getScores(ScoreFunctionBuilder... 
scoreFunctionBuilders) { float[] scores = new float[scoreFunctionBuilders.length]; int scorecounter = 0; @@ -332,8 +345,7 @@ public class FunctionScoreIT extends ESIntegTestCase { SearchResponse response = client().search( searchRequest().source( searchSource().query( - functionScoreQuery(constantScoreQuery(termQuery(TEXT_FIELD, "value"))) - .add(builder) + functionScoreQuery(constantScoreQuery(termQuery(TEXT_FIELD, "value")), builder) ))).actionGet(); scores[scorecounter] = response.getHits().getAt(0).getScore(); scorecounter++; @@ -397,12 +409,12 @@ public class FunctionScoreIT extends ESIntegTestCase { assertSearchResponse(response); assertThat(response.getHits().getAt(0).score(), equalTo(2.0f)); response = client().search( - searchRequest().source(searchSource().query(functionScoreQuery().add(new WeightBuilder().setWeight(2.0f)))) + searchRequest().source(searchSource().query(functionScoreQuery(new WeightBuilder().setWeight(2.0f)))) ).actionGet(); assertSearchResponse(response); assertThat(response.getHits().getAt(0).score(), equalTo(2.0f)); response = client().search( - searchRequest().source(searchSource().query(functionScoreQuery().add(weightFactorFunction(2.0f)))) + searchRequest().source(searchSource().query(functionScoreQuery(weightFactorFunction(2.0f)))) ).actionGet(); assertSearchResponse(response); assertThat(response.getHits().getAt(0).score(), equalTo(2.0f)); @@ -419,10 +431,10 @@ public class FunctionScoreIT extends ESIntegTestCase { searchSource().query( functionScoreQuery( functionScoreQuery( -functionScoreQuery().add(scriptFunction(new Script("1")))).add( - scriptFunction(new Script("_score.doubleValue()")))).add( + functionScoreQuery(scriptFunction(new Script("1"))), + scriptFunction(new Script("_score.doubleValue()"))), scriptFunction(new Script("_score.doubleValue()")) - ) + ) ) ) ).actionGet(); @@ -438,7 +450,7 @@ functionScoreQuery().add(scriptFunction(new Script("1")))).add( refresh(); SearchResponse response = client().search( 
searchRequest().source( - searchSource().query(functionScoreQuery().add(scriptFunction(new Script("_score.doubleValue()")))).aggregation( + searchSource().query(functionScoreQuery(scriptFunction(new Script("_score.doubleValue()")))).aggregation( terms("score_agg").script(new Script("_score.doubleValue()"))) ) ).actionGet(); @@ -457,7 +469,7 @@ functionScoreQuery().add(scriptFunction(new Script("1")))).add( SearchResponse searchResponse = client().search( searchRequest().source( searchSource().query( - functionScoreQuery().add(scriptFunction(new Script(Float.toString(score)))).setMinScore(minScore))) + functionScoreQuery(scriptFunction(new Script(Float.toString(score)))).setMinScore(minScore))) ).actionGet(); if (score < minScore) { assertThat(searchResponse.getHits().getTotalHits(), is(0l)); @@ -466,11 +478,11 @@ functionScoreQuery().add(scriptFunction(new Script("1")))).add( } searchResponse = client().search( - searchRequest().source(searchSource().query(functionScoreQuery() -.add(scriptFunction(new Script(Float.toString(score)))) - .add(scriptFunction(new Script(Float.toString(score)))) - .scoreMode("avg").setMinScore(minScore))) - ).actionGet(); + searchRequest().source(searchSource().query(functionScoreQuery(new MatchAllQueryBuilder(), new FunctionScoreQueryBuilder.FilterFunctionBuilder[] { + new FunctionScoreQueryBuilder.FilterFunctionBuilder(scriptFunction(new Script(Float.toString(score)))), + new FunctionScoreQueryBuilder.FilterFunctionBuilder(scriptFunction(new Script(Float.toString(score)))) + }).scoreMode(FiltersFunctionScoreQuery.ScoreMode.AVG).setMinScore(minScore))) + ).actionGet(); if (score < minScore) { assertThat(searchResponse.getHits().getTotalHits(), is(0l)); } else { @@ -499,16 +511,15 @@ functionScoreQuery().add(scriptFunction(new Script("1")))).add( } SearchResponse searchResponse = client().search( - searchRequest().source(searchSource().query(functionScoreQuery() - .add(scriptFunction(script)) + 
searchRequest().source(searchSource().query(functionScoreQuery(scriptFunction(script)) .setMinScore(minScore)).size(numDocs))).actionGet(); assertMinScoreSearchResponses(numDocs, searchResponse, numMatchingDocs); searchResponse = client().search( - searchRequest().source(searchSource().query(functionScoreQuery() - .add(scriptFunction(script)) - .add(scriptFunction(script)) - .scoreMode("avg").setMinScore(minScore)).size(numDocs))).actionGet(); + searchRequest().source(searchSource().query(functionScoreQuery(new MatchAllQueryBuilder(), new FunctionScoreQueryBuilder.FilterFunctionBuilder[] { + new FunctionScoreQueryBuilder.FilterFunctionBuilder(scriptFunction(script)), + new FunctionScoreQueryBuilder.FilterFunctionBuilder(scriptFunction(script)) + }).scoreMode(FiltersFunctionScoreQuery.ScoreMode.AVG).setMinScore(minScore)).size(numDocs))).actionGet(); assertMinScoreSearchResponses(numDocs, searchResponse, numMatchingDocs); } @@ -531,15 +542,12 @@ functionScoreQuery().add(scriptFunction(new Script("1")))).add( // make sure that min_score works if functions is empty, see https://github.com/elastic/elasticsearch/issues/10253 float termQueryScore = 0.19178301f; - testMinScoreApplied("sum", termQueryScore); - testMinScoreApplied("avg", termQueryScore); - testMinScoreApplied("max", termQueryScore); - testMinScoreApplied("min", termQueryScore); - testMinScoreApplied("multiply", termQueryScore); - testMinScoreApplied("replace", termQueryScore); + for (CombineFunction combineFunction : CombineFunction.values()) { + testMinScoreApplied(combineFunction, termQueryScore); + } } - protected void testMinScoreApplied(String boostMode, float expectedScore) throws InterruptedException, ExecutionException { + protected void testMinScoreApplied(CombineFunction boostMode, float expectedScore) throws InterruptedException, ExecutionException { SearchResponse response = client().search( searchRequest().source( searchSource().explain(true).query( diff --git 
a/core/src/test/java/org/elasticsearch/search/functionscore/FunctionScorePluginIT.java b/core/src/test/java/org/elasticsearch/search/functionscore/FunctionScorePluginIT.java index 946fb593bd1..4dab3c3f6cc 100644 --- a/core/src/test/java/org/elasticsearch/search/functionscore/FunctionScorePluginIT.java +++ b/core/src/test/java/org/elasticsearch/search/functionscore/FunctionScorePluginIT.java @@ -24,7 +24,7 @@ import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchType; import org.elasticsearch.common.Priority; -import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.index.query.functionscore.DecayFunction; import org.elasticsearch.index.query.functionscore.DecayFunctionBuilder; import org.elasticsearch.index.query.functionscore.DecayFunctionParser; @@ -41,7 +41,6 @@ import java.util.Collection; import static org.elasticsearch.client.Requests.indexRequest; import static org.elasticsearch.client.Requests.searchRequest; -import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.index.query.QueryBuilders.functionScoreQuery; import static org.elasticsearch.index.query.QueryBuilders.termQuery; @@ -82,7 +81,7 @@ public class FunctionScorePluginIT extends ESIntegTestCase { DecayFunctionBuilder gfb = new CustomDistanceScoreBuilder("num1", "2013-05-28", "+1d"); ActionFuture response = client().search(searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source( - searchSource().explain(false).query(functionScoreQuery(termQuery("test", "value")).add(gfb)))); + searchSource().explain(false).query(functionScoreQuery(termQuery("test", "value"), gfb)))); SearchResponse sr = response.actionGet(); ElasticsearchAssertions.assertNoFailures(sr); @@ -109,10 +108,11 @@ public class 
FunctionScorePluginIT extends ESIntegTestCase { public void onModule(SearchModule scoreModule) { scoreModule.registerFunctionScoreParser(FunctionScorePluginIT.CustomDistanceScoreParser.class); } - } - public static class CustomDistanceScoreParser extends DecayFunctionParser { + public static class CustomDistanceScoreParser extends DecayFunctionParser { + + private static final CustomDistanceScoreBuilder PROTOTYPE = new CustomDistanceScoreBuilder("", "", ""); public static final String[] NAMES = { "linear_mult", "linearMult" }; @@ -121,20 +121,46 @@ public class FunctionScorePluginIT extends ESIntegTestCase { return NAMES; } - static final DecayFunction decayFunction = new LinearMultScoreFunction(); + @Override + public CustomDistanceScoreBuilder getBuilderPrototype() { + return PROTOTYPE; + } + } + + public static class CustomDistanceScoreBuilder extends DecayFunctionBuilder { + + public CustomDistanceScoreBuilder(String fieldName, Object origin, Object scale) { + super(fieldName, origin, scale, null); + } + + private CustomDistanceScoreBuilder(String fieldName, BytesReference functionBytes) { + super(fieldName, functionBytes); + } + + @Override + protected CustomDistanceScoreBuilder createFunctionBuilder(String fieldName, BytesReference functionBytes) { + return new CustomDistanceScoreBuilder(fieldName, functionBytes); + } + + @Override + public String getName() { + return CustomDistanceScoreParser.NAMES[0]; + } @Override public DecayFunction getDecayFunction() { return decayFunction; } - static class LinearMultScoreFunction implements DecayFunction { + private static final DecayFunction decayFunction = new LinearMultScoreFunction(); + + private static class LinearMultScoreFunction implements DecayFunction { LinearMultScoreFunction() { } @Override public double evaluate(double value, double scale) { - + return value; } @@ -149,17 +175,4 @@ public class FunctionScorePluginIT extends ESIntegTestCase { } } } - - public class CustomDistanceScoreBuilder extends 
DecayFunctionBuilder { - - public CustomDistanceScoreBuilder(String fieldName, Object origin, Object scale) { - super(fieldName, origin, scale); - } - - @Override - public String getName() { - return CustomDistanceScoreParser.NAMES[0]; - } - - } } diff --git a/core/src/test/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java b/core/src/test/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java index cdbe3be602f..094f528dc96 100644 --- a/core/src/test/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java +++ b/core/src/test/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java @@ -20,6 +20,7 @@ package org.elasticsearch.search.functionscore; import org.apache.lucene.util.ArrayUtil; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder; import org.elasticsearch.index.query.functionscore.random.RandomScoreFunctionBuilder; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService.ScriptType; @@ -104,7 +105,7 @@ public class RandomScoreFunctionIT extends ESIntegTestCase { public void testScoreAccessWithinScript() throws Exception { assertAcked(prepareCreate("test").addMapping("type", "body", "type=string", "index", - "type=" + randomFrom(new String[] { "short", "float", "long", "integer", "double" }))); + "type=" + randomFrom("short", "float", "long", "integer", "double"))); ensureYellow(); int docCount = randomIntBetween(100, 200); @@ -120,9 +121,10 @@ public class RandomScoreFunctionIT extends ESIntegTestCase { SearchResponse resp = client() .prepareSearch("test") .setQuery( - functionScoreQuery(matchQuery("body", "foo")).add(fieldValueFactorFunction("index").factor(2)).add( - scriptFunction(new Script("log(doc['index'].value + (factor * _score))", ScriptType.INLINE, null, params)))) - .get(); + functionScoreQuery(matchQuery("body", "foo"), new FunctionScoreQueryBuilder.FilterFunctionBuilder[]{ 
+ new FunctionScoreQueryBuilder.FilterFunctionBuilder(fieldValueFactorFunction("index").factor(2)), + new FunctionScoreQueryBuilder.FilterFunctionBuilder(scriptFunction(new Script("log(doc['index'].value + (factor * _score))", ScriptType.INLINE, null, params))) + })).get(); assertNoFailures(resp); SearchHit firstHit = resp.getHits().getAt(0); assertThat(firstHit.getScore(), greaterThan(1f)); @@ -131,9 +133,10 @@ public class RandomScoreFunctionIT extends ESIntegTestCase { resp = client() .prepareSearch("test") .setQuery( - functionScoreQuery(matchQuery("body", "foo")).add(fieldValueFactorFunction("index").factor(2)).add( - scriptFunction(new Script("log(doc['index'].value + (factor * _score.intValue()))", ScriptType.INLINE, - null, params)))).get(); + functionScoreQuery(matchQuery("body", "foo"), new FunctionScoreQueryBuilder.FilterFunctionBuilder[]{ + new FunctionScoreQueryBuilder.FilterFunctionBuilder(fieldValueFactorFunction("index").factor(2)), + new FunctionScoreQueryBuilder.FilterFunctionBuilder(scriptFunction(new Script("log(doc['index'].value + (factor * _score.intValue()))", ScriptType.INLINE, null, params))) + })).get(); assertNoFailures(resp); firstHit = resp.getHits().getAt(0); assertThat(firstHit.getScore(), greaterThan(1f)); @@ -142,9 +145,10 @@ public class RandomScoreFunctionIT extends ESIntegTestCase { resp = client() .prepareSearch("test") .setQuery( - functionScoreQuery(matchQuery("body", "foo")).add(fieldValueFactorFunction("index").factor(2)).add( - scriptFunction(new Script("log(doc['index'].value + (factor * _score.longValue()))", ScriptType.INLINE, - null, params)))).get(); + functionScoreQuery(matchQuery("body", "foo"), new FunctionScoreQueryBuilder.FilterFunctionBuilder[]{ + new FunctionScoreQueryBuilder.FilterFunctionBuilder(fieldValueFactorFunction("index").factor(2)), + new FunctionScoreQueryBuilder.FilterFunctionBuilder(scriptFunction(new Script("log(doc['index'].value + (factor * _score.longValue()))", ScriptType.INLINE, null, 
params))) + })).get(); assertNoFailures(resp); firstHit = resp.getHits().getAt(0); assertThat(firstHit.getScore(), greaterThan(1f)); @@ -153,9 +157,11 @@ public class RandomScoreFunctionIT extends ESIntegTestCase { resp = client() .prepareSearch("test") .setQuery( - functionScoreQuery(matchQuery("body", "foo")).add(fieldValueFactorFunction("index").factor(2)).add( - scriptFunction(new Script("log(doc['index'].value + (factor * _score.floatValue()))", ScriptType.INLINE, - null, params)))).get(); + functionScoreQuery(matchQuery("body", "foo"), new FunctionScoreQueryBuilder.FilterFunctionBuilder[]{ + new FunctionScoreQueryBuilder.FilterFunctionBuilder(fieldValueFactorFunction("index").factor(2)), + new FunctionScoreQueryBuilder.FilterFunctionBuilder(scriptFunction(new Script("log(doc['index'].value + (factor * _score.floatValue()))", + ScriptType.INLINE, null, params))) + })).get(); assertNoFailures(resp); firstHit = resp.getHits().getAt(0); assertThat(firstHit.getScore(), greaterThan(1f)); @@ -164,9 +170,11 @@ public class RandomScoreFunctionIT extends ESIntegTestCase { resp = client() .prepareSearch("test") .setQuery( - functionScoreQuery(matchQuery("body", "foo")).add(fieldValueFactorFunction("index").factor(2)).add( - scriptFunction(new Script("log(doc['index'].value + (factor * _score.doubleValue()))", ScriptType.INLINE, - null, params)))).get(); + functionScoreQuery(matchQuery("body", "foo"), new FunctionScoreQueryBuilder.FilterFunctionBuilder[]{ + new FunctionScoreQueryBuilder.FilterFunctionBuilder(fieldValueFactorFunction("index").factor(2)), + new FunctionScoreQueryBuilder.FilterFunctionBuilder(scriptFunction(new Script("log(doc['index'].value + (factor * _score.doubleValue()))", + ScriptType.INLINE, null, params))) + })).get(); assertNoFailures(resp); firstHit = resp.getHits().getAt(0); assertThat(firstHit.getScore(), greaterThan(1f)); diff --git a/core/src/test/java/org/elasticsearch/search/geo/GeoBoundingBoxIT.java 
b/core/src/test/java/org/elasticsearch/search/geo/GeoBoundingBoxIT.java index cb791685179..b61b38993ee 100644 --- a/core/src/test/java/org/elasticsearch/search/geo/GeoBoundingBoxIT.java +++ b/core/src/test/java/org/elasticsearch/search/geo/GeoBoundingBoxIT.java @@ -22,12 +22,12 @@ package org.elasticsearch.search.geo; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.index.query.GeoValidationMethod; import org.elasticsearch.search.SearchHit; import org.elasticsearch.test.ESIntegTestCase; import org.junit.Test; import static org.elasticsearch.index.query.QueryBuilders.boolQuery; - import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.index.query.QueryBuilders.*; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; @@ -91,7 +91,7 @@ public class GeoBoundingBoxIT extends ESIntegTestCase { client().admin().indices().prepareRefresh().execute().actionGet(); SearchResponse searchResponse = client().prepareSearch() // from NY - .setQuery(geoBoundingBoxQuery("location").topLeft(40.73, -74.1).bottomRight(40.717, -73.99)) + .setQuery(geoBoundingBoxQuery("location").setCorners(40.73, -74.1, 40.717, -73.99)) .execute().actionGet(); assertThat(searchResponse.getHits().getTotalHits(), equalTo(2l)); assertThat(searchResponse.getHits().hits().length, equalTo(2)); @@ -100,7 +100,7 @@ public class GeoBoundingBoxIT extends ESIntegTestCase { } searchResponse = client().prepareSearch() // from NY - .setQuery(geoBoundingBoxQuery("location").topLeft(40.73, -74.1).bottomRight(40.717, -73.99).type("indexed")) + .setQuery(geoBoundingBoxQuery("location").setCorners(40.73, -74.1, 40.717, -73.99).type("indexed")) .execute().actionGet(); assertThat(searchResponse.getHits().getTotalHits(), equalTo(2l)); assertThat(searchResponse.getHits().hits().length, equalTo(2)); @@ 
-160,52 +160,52 @@ public class GeoBoundingBoxIT extends ESIntegTestCase { refresh(); SearchResponse searchResponse = client().prepareSearch() - .setQuery(geoBoundingBoxQuery("location").topLeft(41, -11).bottomRight(40, 9)) + .setQuery(geoBoundingBoxQuery("location").setCorners(41, -11, 40, 9)) .execute().actionGet(); assertThat(searchResponse.getHits().getTotalHits(), equalTo(1l)); assertThat(searchResponse.getHits().hits().length, equalTo(1)); assertThat(searchResponse.getHits().getAt(0).id(), equalTo("2")); searchResponse = client().prepareSearch() - .setQuery(geoBoundingBoxQuery("location").topLeft(41, -11).bottomRight(40, 9).type("indexed")) + .setQuery(geoBoundingBoxQuery("location").setCorners(41, -11, 40, 9).type("indexed")) .execute().actionGet(); assertThat(searchResponse.getHits().getTotalHits(), equalTo(1l)); assertThat(searchResponse.getHits().hits().length, equalTo(1)); assertThat(searchResponse.getHits().getAt(0).id(), equalTo("2")); searchResponse = client().prepareSearch() - .setQuery(geoBoundingBoxQuery("location").topLeft(41, -9).bottomRight(40, 11)) + .setQuery(geoBoundingBoxQuery("location").setCorners(41, -9, 40, 11)) .execute().actionGet(); assertThat(searchResponse.getHits().getTotalHits(), equalTo(1l)); assertThat(searchResponse.getHits().hits().length, equalTo(1)); assertThat(searchResponse.getHits().getAt(0).id(), equalTo("3")); searchResponse = client().prepareSearch() - .setQuery(geoBoundingBoxQuery("location").topLeft(41, -9).bottomRight(40, 11).type("indexed")) + .setQuery(geoBoundingBoxQuery("location").setCorners(41, -9, 40, 11).type("indexed")) .execute().actionGet(); assertThat(searchResponse.getHits().getTotalHits(), equalTo(1l)); assertThat(searchResponse.getHits().hits().length, equalTo(1)); assertThat(searchResponse.getHits().getAt(0).id(), equalTo("3")); searchResponse = client().prepareSearch() - .setQuery(geoBoundingBoxQuery("location").topLeft(11, 171).bottomRight(1, -169)) + 
.setQuery(geoBoundingBoxQuery("location").setCorners(11, 171, 1, -169)) .execute().actionGet(); assertThat(searchResponse.getHits().getTotalHits(), equalTo(1l)); assertThat(searchResponse.getHits().hits().length, equalTo(1)); assertThat(searchResponse.getHits().getAt(0).id(), equalTo("5")); searchResponse = client().prepareSearch() - .setQuery(geoBoundingBoxQuery("location").topLeft(11, 171).bottomRight(1, -169).type("indexed")) + .setQuery(geoBoundingBoxQuery("location").setCorners(11, 171, 1, -169).type("indexed")) .execute().actionGet(); assertThat(searchResponse.getHits().getTotalHits(), equalTo(1l)); assertThat(searchResponse.getHits().hits().length, equalTo(1)); assertThat(searchResponse.getHits().getAt(0).id(), equalTo("5")); searchResponse = client().prepareSearch() - .setQuery(geoBoundingBoxQuery("location").topLeft(9, 169).bottomRight(-1, -171)) + .setQuery(geoBoundingBoxQuery("location").setCorners(9, 169, -1, -171)) .execute().actionGet(); assertThat(searchResponse.getHits().getTotalHits(), equalTo(1l)); assertThat(searchResponse.getHits().hits().length, equalTo(1)); assertThat(searchResponse.getHits().getAt(0).id(), equalTo("9")); searchResponse = client().prepareSearch() - .setQuery(geoBoundingBoxQuery("location").topLeft(9, 169).bottomRight(-1, -171).type("indexed")) + .setQuery(geoBoundingBoxQuery("location").setCorners(9, 169, -1, -171).type("indexed")) .execute().actionGet(); assertThat(searchResponse.getHits().getTotalHits(), equalTo(1l)); assertThat(searchResponse.getHits().hits().length, equalTo(1)); @@ -239,26 +239,26 @@ public class GeoBoundingBoxIT extends ESIntegTestCase { SearchResponse searchResponse = client().prepareSearch() .setQuery( boolQuery().must(termQuery("userid", 880)).filter( - geoBoundingBoxQuery("location").topLeft(74.579421999999994, 143.5).bottomRight(-66.668903999999998, 113.96875)) + geoBoundingBoxQuery("location").setCorners(74.579421999999994, 143.5, -66.668903999999998, 113.96875)) ).execute().actionGet(); 
assertThat(searchResponse.getHits().totalHits(), equalTo(1l)); searchResponse = client().prepareSearch() .setQuery( boolQuery().must(termQuery("userid", 880)).filter( - geoBoundingBoxQuery("location").topLeft(74.579421999999994, 143.5).bottomRight(-66.668903999999998, 113.96875).type("indexed")) + geoBoundingBoxQuery("location").setCorners(74.579421999999994, 143.5, -66.668903999999998, 113.96875).type("indexed")) ).execute().actionGet(); assertThat(searchResponse.getHits().totalHits(), equalTo(1l)); searchResponse = client().prepareSearch() .setQuery( boolQuery().must(termQuery("userid", 534)).filter( - geoBoundingBoxQuery("location").topLeft(74.579421999999994, 143.5).bottomRight(-66.668903999999998, 113.96875)) + geoBoundingBoxQuery("location").setCorners(74.579421999999994, 143.5, -66.668903999999998, 113.96875)) ).execute().actionGet(); assertThat(searchResponse.getHits().totalHits(), equalTo(1l)); searchResponse = client().prepareSearch() .setQuery( boolQuery().must(termQuery("userid", 534)).filter( - geoBoundingBoxQuery("location").topLeft(74.579421999999994, 143.5).bottomRight(-66.668903999999998, 113.96875).type("indexed")) + geoBoundingBoxQuery("location").setCorners(74.579421999999994, 143.5, -66.668903999999998, 113.96875).type("indexed")) ).execute().actionGet(); assertThat(searchResponse.getHits().totalHits(), equalTo(1l)); } @@ -289,43 +289,43 @@ public class GeoBoundingBoxIT extends ESIntegTestCase { SearchResponse searchResponse = client().prepareSearch() .setQuery( - geoBoundingBoxQuery("location").coerce(true).topLeft(50, -180).bottomRight(-50, 180) + geoBoundingBoxQuery("location").setValidationMethod(GeoValidationMethod.COERCE).setCorners(50, -180, -50, 180) ).execute().actionGet(); assertThat(searchResponse.getHits().totalHits(), equalTo(1l)); searchResponse = client().prepareSearch() .setQuery( - geoBoundingBoxQuery("location").coerce(true).topLeft(50, -180).bottomRight(-50, 180).type("indexed") + 
geoBoundingBoxQuery("location").setValidationMethod(GeoValidationMethod.COERCE).setCorners(50, -180, -50, 180).type("indexed") ).execute().actionGet(); assertThat(searchResponse.getHits().totalHits(), equalTo(1l)); searchResponse = client().prepareSearch() .setQuery( - geoBoundingBoxQuery("location").coerce(true).topLeft(90, -180).bottomRight(-90, 180) + geoBoundingBoxQuery("location").setValidationMethod(GeoValidationMethod.COERCE).setCorners(90, -180, -90, 180) ).execute().actionGet(); assertThat(searchResponse.getHits().totalHits(), equalTo(2l)); searchResponse = client().prepareSearch() .setQuery( - geoBoundingBoxQuery("location").coerce(true).topLeft(90, -180).bottomRight(-90, 180).type("indexed") + geoBoundingBoxQuery("location").setValidationMethod(GeoValidationMethod.COERCE).setCorners(90, -180, -90, 180).type("indexed") ).execute().actionGet(); assertThat(searchResponse.getHits().totalHits(), equalTo(2l)); searchResponse = client().prepareSearch() .setQuery( - geoBoundingBoxQuery("location").coerce(true).topLeft(50, 0).bottomRight(-50, 360) + geoBoundingBoxQuery("location").setValidationMethod(GeoValidationMethod.COERCE).setCorners(50, 0, -50, 360) ).execute().actionGet(); assertThat(searchResponse.getHits().totalHits(), equalTo(1l)); searchResponse = client().prepareSearch() .setQuery( - geoBoundingBoxQuery("location").coerce(true).topLeft(50, 0).bottomRight(-50, 360).type("indexed") + geoBoundingBoxQuery("location").setValidationMethod(GeoValidationMethod.COERCE).setCorners(50, 0, -50, 360).type("indexed") ).execute().actionGet(); assertThat(searchResponse.getHits().totalHits(), equalTo(1l)); searchResponse = client().prepareSearch() .setQuery( - geoBoundingBoxQuery("location").coerce(true).topLeft(90, 0).bottomRight(-90, 360) + geoBoundingBoxQuery("location").setValidationMethod(GeoValidationMethod.COERCE).setCorners(90, 0, -90, 360) ).execute().actionGet(); assertThat(searchResponse.getHits().totalHits(), equalTo(2l)); searchResponse = 
client().prepareSearch() .setQuery( - geoBoundingBoxQuery("location").coerce(true).topLeft(90, 0).bottomRight(-90, 360).type("indexed") + geoBoundingBoxQuery("location").setValidationMethod(GeoValidationMethod.COERCE).setCorners(90, 0, -90, 360).type("indexed") ).execute().actionGet(); assertThat(searchResponse.getHits().totalHits(), equalTo(2l)); } diff --git a/core/src/test/java/org/elasticsearch/search/geo/GeoDistanceIT.java b/core/src/test/java/org/elasticsearch/search/geo/GeoDistanceIT.java index d59baf1b9b1..809de3009e6 100644 --- a/core/src/test/java/org/elasticsearch/search/geo/GeoDistanceIT.java +++ b/core/src/test/java/org/elasticsearch/search/geo/GeoDistanceIT.java @@ -75,7 +75,7 @@ public class GeoDistanceIT extends ESIntegTestCase { indexRandom(true, client().prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject() .field("name", "New York") .startObject("location").field("lat", 40.7143528).field("lon", -74.0059731).endObject() - .endObject()), + .endObject()), // to NY: 5.286 km client().prepareIndex("test", "type1", "2").setSource(jsonBuilder().startObject() .field("name", "Times Square") @@ -171,7 +171,7 @@ public class GeoDistanceIT extends ESIntegTestCase { } searchResponse = client().prepareSearch() // from NY - .setQuery(geoDistanceRangeQuery("location").from("1.0km").to("2.0km").point(40.7143528, -74.0059731)) + .setQuery(geoDistanceRangeQuery("location", 40.7143528, -74.0059731).from("1.0km").to("2.0km")) .execute().actionGet(); assertHitCount(searchResponse, 2); assertThat(searchResponse.getHits().hits().length, equalTo(2)); @@ -179,7 +179,7 @@ public class GeoDistanceIT extends ESIntegTestCase { assertThat(hit.id(), anyOf(equalTo("4"), equalTo("5"))); } searchResponse = client().prepareSearch() // from NY - .setQuery(geoDistanceRangeQuery("location").from("1.0km").to("2.0km").point(40.7143528, -74.0059731).optimizeBbox("indexed")) + .setQuery(geoDistanceRangeQuery("location", 40.7143528, 
-74.0059731).from("1.0km").to("2.0km").optimizeBbox("indexed")) .execute().actionGet(); assertHitCount(searchResponse, 2); assertThat(searchResponse.getHits().hits().length, equalTo(2)); @@ -188,13 +188,13 @@ public class GeoDistanceIT extends ESIntegTestCase { } searchResponse = client().prepareSearch() // from NY - .setQuery(geoDistanceRangeQuery("location").to("2.0km").point(40.7143528, -74.0059731)) + .setQuery(geoDistanceRangeQuery("location", 40.7143528, -74.0059731).to("2.0km")) .execute().actionGet(); assertHitCount(searchResponse, 4); assertThat(searchResponse.getHits().hits().length, equalTo(4)); searchResponse = client().prepareSearch() // from NY - .setQuery(geoDistanceRangeQuery("location").from("2.0km").point(40.7143528, -74.0059731)) + .setQuery(geoDistanceRangeQuery("location", 40.7143528, -74.0059731).from("2.0km")) .execute().actionGet(); assertHitCount(searchResponse, 3); assertThat(searchResponse.getHits().hits().length, equalTo(3)); @@ -394,7 +394,7 @@ public class GeoDistanceIT extends ESIntegTestCase { // Doc with missing geo point is first, is consistent with 0.20.x assertHitCount(searchResponse, 2); - assertOrderedSearchHits(searchResponse, "2", "1"); + assertOrderedSearchHits(searchResponse, "2", "1"); assertThat(((Number) searchResponse.getHits().getAt(0).sortValues()[0]).doubleValue(), equalTo(Double.MAX_VALUE)); assertThat(((Number) searchResponse.getHits().getAt(1).sortValues()[0]).doubleValue(), closeTo(5286d, 10d)); } @@ -508,7 +508,7 @@ public class GeoDistanceIT extends ESIntegTestCase { .startObject("location").field("lat", 40.7143528).field("lon", -74.0059731).endObject() .endObject() .endArray() - .endObject()), + .endObject()), client().prepareIndex("companies", "company", "2").setSource(jsonBuilder().startObject() .field("name", "company 2") .startArray("branches") @@ -641,7 +641,7 @@ public class GeoDistanceIT extends ESIntegTestCase { RestStatus.BAD_REQUEST, containsString("sort_mode [sum] isn't supported for sorting by geo 
distance")); } - + /** * Issue 3073 */ @@ -681,12 +681,12 @@ public class GeoDistanceIT extends ESIntegTestCase { .setQuery(QueryBuilders.matchAllQuery()) .setPostFilter(QueryBuilders.geoDistanceQuery("pin") .geoDistance(GeoDistance.ARC) - .lat(lat).lon(lon) + .point(lat, lon) .distance("1m")) .execute().actionGet(); assertHitCount(result, 1); - } + } private double randomLon() { return randomDouble() * 360 - 180; diff --git a/core/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java b/core/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java index 4d5ae302958..4548fbd7d0e 100644 --- a/core/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java +++ b/core/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java @@ -434,9 +434,7 @@ public class GeoFilterIT extends ESIntegTestCase { } SearchResponse world = client().prepareSearch().addField("pin").setQuery( - geoBoundingBoxQuery("pin") - .topLeft(90, -179.99999) - .bottomRight(-90, 179.99999) + geoBoundingBoxQuery("pin").setCorners(90, -179.99999, -90, 179.99999) ).execute().actionGet(); assertHitCount(world, 53); diff --git a/core/src/test/java/org/elasticsearch/search/geo/GeoPolygonIT.java b/core/src/test/java/org/elasticsearch/search/geo/GeoPolygonIT.java index d8417014210..248c62bf16e 100644 --- a/core/src/test/java/org/elasticsearch/search/geo/GeoPolygonIT.java +++ b/core/src/test/java/org/elasticsearch/search/geo/GeoPolygonIT.java @@ -20,12 +20,16 @@ package org.elasticsearch.search.geo; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.search.SearchHit; import org.elasticsearch.test.ESIntegTestCase; import org.junit.Test; +import java.util.ArrayList; +import java.util.List; + import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static 
org.elasticsearch.index.query.QueryBuilders.boolQuery; import static org.elasticsearch.index.query.QueryBuilders.geoPolygonQuery; @@ -49,7 +53,7 @@ public class GeoPolygonIT extends ESIntegTestCase { indexRandom(true, client().prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject() .field("name", "New York") .startObject("location").field("lat", 40.714).field("lon", -74.006).endObject() - .endObject()), + .endObject()), // to NY: 5.286 km client().prepareIndex("test", "type1", "2").setSource(jsonBuilder().startObject() .field("name", "Times Square") @@ -85,14 +89,14 @@ public class GeoPolygonIT extends ESIntegTestCase { @Test public void simplePolygonTest() throws Exception { - + List points = new ArrayList<>(); + points.add(new GeoPoint(40.7, -74.0)); + points.add(new GeoPoint(40.7, -74.1)); + points.add(new GeoPoint(40.8, -74.1)); + points.add(new GeoPoint(40.8, -74.0)); + points.add(new GeoPoint(40.7, -74.0)); SearchResponse searchResponse = client().prepareSearch("test") // from NY - .setQuery(boolQuery().must(geoPolygonQuery("location") - .addPoint(40.7, -74.0) - .addPoint(40.7, -74.1) - .addPoint(40.8, -74.1) - .addPoint(40.8, -74.0) - .addPoint(40.7, -74.0))) + .setQuery(boolQuery().must(geoPolygonQuery("location", points))) .execute().actionGet(); assertHitCount(searchResponse, 4); assertThat(searchResponse.getHits().hits().length, equalTo(4)); @@ -103,13 +107,13 @@ public class GeoPolygonIT extends ESIntegTestCase { @Test public void simpleUnclosedPolygon() throws Exception { + List points = new ArrayList<>(); + points.add(new GeoPoint(40.7, -74.0)); + points.add(new GeoPoint(40.7, -74.1)); + points.add(new GeoPoint(40.8, -74.1)); + points.add(new GeoPoint(40.8, -74.0)); SearchResponse searchResponse = client().prepareSearch("test") // from NY - .setQuery(boolQuery().must(geoPolygonQuery("location") - .addPoint(40.7, -74.0) - .addPoint(40.7, -74.1) - .addPoint(40.8, -74.1) - .addPoint(40.8, -74.0))) - .execute().actionGet(); + 
.setQuery(boolQuery().must(geoPolygonQuery("location", points))).execute().actionGet(); assertHitCount(searchResponse, 4); assertThat(searchResponse.getHits().hits().length, equalTo(4)); for (SearchHit hit : searchResponse.getHits()) { diff --git a/core/src/test/java/org/elasticsearch/search/geo/GeoShapeIntegrationIT.java b/core/src/test/java/org/elasticsearch/search/geo/GeoShapeIntegrationIT.java index feb3322d7ef..670d31739b1 100644 --- a/core/src/test/java/org/elasticsearch/search/geo/GeoShapeIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/search/geo/GeoShapeIntegrationIT.java @@ -49,7 +49,10 @@ import static org.elasticsearch.index.query.QueryBuilders.geoShapeQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.nullValue; public class GeoShapeIntegrationIT extends ESIntegTestCase { @@ -286,28 +289,28 @@ public class GeoShapeIntegrationIT extends ESIntegTestCase { .endObject().endObject())); ensureSearchable("test", "shapes"); - GeoShapeQueryBuilder filter = QueryBuilders.geoShapeQuery("location", "1", "type", ShapeRelation.INTERSECTS) + GeoShapeQueryBuilder filter = QueryBuilders.geoShapeQuery("location", "1", "type").relation(ShapeRelation.INTERSECTS) .indexedShapeIndex("shapes") .indexedShapePath("location"); SearchResponse result = client().prepareSearch("test").setQuery(QueryBuilders.matchAllQuery()) .setPostFilter(filter).get(); assertSearchResponse(result); assertHitCount(result, 1); - filter = QueryBuilders.geoShapeQuery("location", "1", "type", ShapeRelation.INTERSECTS) + filter = 
QueryBuilders.geoShapeQuery("location", "1", "type").relation(ShapeRelation.INTERSECTS) .indexedShapeIndex("shapes") .indexedShapePath("1.location"); result = client().prepareSearch("test").setQuery(QueryBuilders.matchAllQuery()) .setPostFilter(filter).get(); assertSearchResponse(result); assertHitCount(result, 1); - filter = QueryBuilders.geoShapeQuery("location", "1", "type", ShapeRelation.INTERSECTS) + filter = QueryBuilders.geoShapeQuery("location", "1", "type").relation(ShapeRelation.INTERSECTS) .indexedShapeIndex("shapes") .indexedShapePath("1.2.location"); result = client().prepareSearch("test").setQuery(QueryBuilders.matchAllQuery()) .setPostFilter(filter).get(); assertSearchResponse(result); assertHitCount(result, 1); - filter = QueryBuilders.geoShapeQuery("location", "1", "type", ShapeRelation.INTERSECTS) + filter = QueryBuilders.geoShapeQuery("location", "1", "type").relation(ShapeRelation.INTERSECTS) .indexedShapeIndex("shapes") .indexedShapePath("1.2.3.location"); result = client().prepareSearch("test").setQuery(QueryBuilders.matchAllQuery()) @@ -360,7 +363,8 @@ public class GeoShapeIntegrationIT extends ESIntegTestCase { ShapeBuilder filterShape = (gcb.getShapeAt(randomIntBetween(0, gcb.numShapes() - 1))); - GeoShapeQueryBuilder filter = QueryBuilders.geoShapeQuery("location", filterShape, ShapeRelation.INTERSECTS); + GeoShapeQueryBuilder filter = QueryBuilders.geoShapeQuery("location", filterShape); + filter.relation(ShapeRelation.INTERSECTS); SearchResponse result = client().prepareSearch("test").setQuery(QueryBuilders.matchAllQuery()) .setPostFilter(filter).get(); assertSearchResponse(result); @@ -399,19 +403,30 @@ public class GeoShapeIntegrationIT extends ESIntegTestCase { .setSource(docSource)); ensureSearchable("test"); - GeoShapeQueryBuilder filter = QueryBuilders.geoShapeQuery("location", ShapeBuilder.newGeometryCollection().polygon(ShapeBuilder.newPolygon().point(99.0, -1.0).point(99.0, 3.0).point(103.0, 3.0).point(103.0, -1.0).point(99.0, 
-1.0)), ShapeRelation.INTERSECTS); + GeoShapeQueryBuilder filter = QueryBuilders.geoShapeQuery( + "location", + ShapeBuilder.newGeometryCollection() + .polygon( + ShapeBuilder.newPolygon().point(99.0, -1.0).point(99.0, 3.0).point(103.0, 3.0).point(103.0, -1.0) + .point(99.0, -1.0))).relation(ShapeRelation.INTERSECTS); SearchResponse result = client().prepareSearch("test").setQuery(QueryBuilders.matchAllQuery()) .setPostFilter(filter).get(); assertSearchResponse(result); assertHitCount(result, 1); - filter = QueryBuilders.geoShapeQuery("location", ShapeBuilder.newGeometryCollection().polygon(ShapeBuilder.newPolygon().point(199.0, -11.0).point(199.0, 13.0).point(193.0, 13.0).point(193.0, -11.0).point(199.0, -11.0)), ShapeRelation.INTERSECTS); + filter = QueryBuilders.geoShapeQuery( + "location", + ShapeBuilder.newGeometryCollection().polygon( + ShapeBuilder.newPolygon().point(199.0, -11.0).point(199.0, 13.0).point(193.0, 13.0).point(193.0, -11.0) + .point(199.0, -11.0))).relation(ShapeRelation.INTERSECTS); result = client().prepareSearch("test").setQuery(QueryBuilders.matchAllQuery()) .setPostFilter(filter).get(); assertSearchResponse(result); assertHitCount(result, 0); filter = QueryBuilders.geoShapeQuery("location", ShapeBuilder.newGeometryCollection() .polygon(ShapeBuilder.newPolygon().point(99.0, -1.0).point(99.0, 3.0).point(103.0, 3.0).point(103.0, -1.0).point(99.0, -1.0)) - .polygon(ShapeBuilder.newPolygon().point(199.0, -11.0).point(199.0, 13.0).point(193.0, 13.0).point(193.0, -11.0).point(199.0, -11.0)), ShapeRelation.INTERSECTS); + .polygon( + ShapeBuilder.newPolygon().point(199.0, -11.0).point(199.0, 13.0).point(193.0, 13.0).point(193.0, -11.0) + .point(199.0, -11.0))).relation(ShapeRelation.INTERSECTS); result = client().prepareSearch("test").setQuery(QueryBuilders.matchAllQuery()) .setPostFilter(filter).get(); assertSearchResponse(result); diff --git a/core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java 
b/core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java index 93449c90963..4134c4f2941 100644 --- a/core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java @@ -26,8 +26,13 @@ import org.elasticsearch.common.settings.Settings.Builder; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.query.*; -import org.elasticsearch.index.query.MatchQueryBuilder.Operator; -import org.elasticsearch.index.query.MatchQueryBuilder.Type; +import org.elasticsearch.index.query.IdsQueryBuilder; +import org.elasticsearch.index.query.MatchQueryBuilder; +import org.elasticsearch.index.search.MatchQuery.Type; +import org.elasticsearch.index.search.MatchQuery; +import org.elasticsearch.index.query.MultiMatchQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.builder.SearchSourceBuilder; @@ -938,12 +943,12 @@ public class HighlighterSearchIT extends ESIntegTestCase { assertHighlight(resp, 0, "foo", 0, equalTo("junk junk cats junk junk")); // which can also be written by searching on the subfield - resp = req.setQuery(queryStringQuery("cats").field("foo").field("foo.plain^5")).get(); + resp = req.setQuery(queryStringQuery("cats").field("foo").field("foo.plain", 5)).get(); assertHighlight(resp, 0, "foo", 0, equalTo("junk junk cats junk junk")); // Speaking of two fields, you can have two fields, only one of which has matchedFields enabled - QueryBuilder twoFieldsQuery = queryStringQuery("cats").field("foo").field("foo.plain^5") - .field("bar").field("bar.plain^5"); + QueryBuilder twoFieldsQuery = queryStringQuery("cats").field("foo").field("foo.plain", 5) + .field("bar").field("bar.plain", 
5); resp = req.setQuery(twoFieldsQuery).addHighlightedField(barField).get(); assertHighlight(resp, 0, "foo", 0, equalTo("junk junk cats junk junk")); assertHighlight(resp, 0, "bar", 0, equalTo("cat cat junk junk junk junk")); @@ -1365,7 +1370,7 @@ public class HighlighterSearchIT extends ESIntegTestCase { logger.info("--> highlighting and searching on field1"); SearchSourceBuilder source = searchSource() - .query(boostingQuery().positive(termQuery("field2", "brown")).negative(termQuery("field2", "foobar")).negativeBoost(0.5f)) + .query(boostingQuery(termQuery("field2", "brown"), termQuery("field2", "foobar")).negativeBoost(0.5f)) .highlight(highlight().field("field2").order("score").preTags("").postTags("")); SearchResponse searchResponse = client().prepareSearch("test").setSource(source).get(); @@ -1384,7 +1389,7 @@ public class HighlighterSearchIT extends ESIntegTestCase { logger.info("--> highlighting and searching on field1"); SearchSourceBuilder source = searchSource() - .query(boostingQuery().positive(termQuery("field2", "brown")).negative(termQuery("field2", "foobar")).negativeBoost(0.5f)) + .query(boostingQuery(termQuery("field2", "brown"), termQuery("field2", "foobar")).negativeBoost(0.5f)) .highlight(highlight().field("field2").order("score").preTags("").postTags("")); SearchResponse searchResponse = client().prepareSearch("test").setSource(source).get(); @@ -1510,7 +1515,7 @@ public class HighlighterSearchIT extends ESIntegTestCase { refresh(); SearchResponse response = client().prepareSearch("test") - .setQuery(QueryBuilders.matchQuery("tags", "long tag").type(MatchQueryBuilder.Type.PHRASE)) + .setQuery(QueryBuilders.matchQuery("tags", "long tag").type(MatchQuery.Type.PHRASE)) .addHighlightedField(new HighlightBuilder.Field("tags") .fragmentSize(-1).numOfFragments(2).fragmenter("simple")).get(); @@ -1518,7 +1523,7 @@ public class HighlighterSearchIT extends ESIntegTestCase { assertHighlight(response, 0, "tags", 1, 2, equalTo("here is another one that is 
very long tag and has the tag token near the end")); response = client().prepareSearch("test") - .setQuery(QueryBuilders.matchQuery("tags", "long tag").type(MatchQueryBuilder.Type.PHRASE)) + .setQuery(QueryBuilders.matchQuery("tags", "long tag").type(MatchQuery.Type.PHRASE)) .addHighlightedField(new HighlightBuilder.Field("tags") .fragmentSize(-1).numOfFragments(2).fragmenter("span")).get(); @@ -1526,7 +1531,7 @@ public class HighlighterSearchIT extends ESIntegTestCase { assertHighlight(response, 0, "tags", 1, 2, equalTo("here is another one that is very long tag and has the tag token near the end")); assertFailures(client().prepareSearch("test") - .setQuery(QueryBuilders.matchQuery("tags", "long tag").type(MatchQueryBuilder.Type.PHRASE)) + .setQuery(QueryBuilders.matchQuery("tags", "long tag").type(MatchQuery.Type.PHRASE)) .addHighlightedField(new HighlightBuilder.Field("tags") .fragmentSize(-1).numOfFragments(2).fragmenter("invalid")), RestStatus.BAD_REQUEST, @@ -1581,7 +1586,7 @@ public class HighlighterSearchIT extends ESIntegTestCase { // This query used to fail when the field to highlight was absent SearchResponse response = client().prepareSearch("test") - .setQuery(QueryBuilders.matchQuery("field", "highlight").type(MatchQueryBuilder.Type.BOOLEAN)) + .setQuery(QueryBuilders.matchQuery("field", "highlight").type(MatchQuery.Type.BOOLEAN)) .addHighlightedField(new HighlightBuilder.Field("highlight_field") .fragmentSize(-1).numOfFragments(1).fragmenter("simple")).get(); assertThat(response.getHits().hits()[0].highlightFields().isEmpty(), equalTo(true)); @@ -1601,7 +1606,7 @@ public class HighlighterSearchIT extends ESIntegTestCase { refresh(); SearchResponse response = client().prepareSearch("test") - .setQuery(QueryBuilders.matchQuery("text", "test").type(MatchQueryBuilder.Type.BOOLEAN)) + .setQuery(QueryBuilders.matchQuery("text", "test").type(MatchQuery.Type.BOOLEAN)) .addHighlightedField("text") .addHighlightedField("byte") .addHighlightedField("short") @@ 
-1631,7 +1636,7 @@ public class HighlighterSearchIT extends ESIntegTestCase { refresh(); SearchResponse response = client().prepareSearch("test") - .setQuery(QueryBuilders.matchQuery("text", "test").type(MatchQueryBuilder.Type.BOOLEAN)) + .setQuery(QueryBuilders.matchQuery("text", "test").type(MatchQuery.Type.BOOLEAN)) .addHighlightedField("text").execute().actionGet(); // PatternAnalyzer will throw an exception if it is resetted twice assertHitCount(response, 1l); @@ -2114,7 +2119,7 @@ public class HighlighterSearchIT extends ESIntegTestCase { } else { supportedQueryTypes = MultiMatchQueryBuilder.Type.values(); } - MultiMatchQueryBuilder.Type matchQueryType = rarely() ? null : RandomPicks.randomFrom(getRandom(), supportedQueryTypes); + MultiMatchQueryBuilder.Type matchQueryType = RandomPicks.randomFrom(getRandom(), supportedQueryTypes); final MultiMatchQueryBuilder multiMatchQueryBuilder = multiMatchQuery("the quick brown fox", "field1", "field2").type(matchQueryType); SearchSourceBuilder source = searchSource() @@ -2298,7 +2303,7 @@ public class HighlighterSearchIT extends ESIntegTestCase { logger.info("--> highlighting and searching on field1"); SearchSourceBuilder source = searchSource() - .query(boostingQuery().positive(termQuery("field2", "brown")).negative(termQuery("field2", "foobar")).negativeBoost(0.5f)) + .query(boostingQuery(termQuery("field2", "brown"), termQuery("field2", "foobar")).negativeBoost(0.5f)) .highlight(highlight().field("field2").preTags("").postTags("")); SearchResponse searchResponse = client().search(searchRequest("test").source(source)).actionGet(); @@ -2584,10 +2589,10 @@ public class HighlighterSearchIT extends ESIntegTestCase { // Query string boosting the field phraseBoostTestCaseForClauses(highlighterType, 1f, queryStringQuery("highlight words together").field("field1"), - queryStringQuery("\"highlight words together\"").field("field1^100").autoGeneratePhraseQueries(true)); + queryStringQuery("\"highlight words 
together\"").field("field1", 100).autoGeneratePhraseQueries(true)); } - private

> void + private

> void phraseBoostTestCaseForClauses(String highlighterType, float boost, QueryBuilder terms, P phrase) { Matcher highlightedMatcher = Matchers.either(containsString("highlight words together")).or( containsString("highlight words together")); @@ -2601,10 +2606,10 @@ public class HighlighterSearchIT extends ESIntegTestCase { assertHighlight(response, 0, "field1", 0, 1, highlightedMatcher); phrase.boost(1); // Try with a boosting query - response = search.setQuery(boostingQuery().positive(phrase).negative(terms).boost(boost).negativeBoost(1)).get(); + response = search.setQuery(boostingQuery(phrase, terms).boost(boost).negativeBoost(1)).get(); assertHighlight(response, 0, "field1", 0, 1, highlightedMatcher); // Try with a boosting query using a negative boost - response = search.setQuery(boostingQuery().positive(phrase).negative(terms).boost(1).negativeBoost(1/boost)).get(); + response = search.setQuery(boostingQuery(phrase, terms).boost(1).negativeBoost(1/boost)).get(); assertHighlight(response, 0, "field1", 0, 1, highlightedMatcher); } } diff --git a/core/src/test/java/org/elasticsearch/search/innerhits/InnerHitsIT.java b/core/src/test/java/org/elasticsearch/search/innerhits/InnerHitsIT.java index ba432864c29..16c54c49325 100644 --- a/core/src/test/java/org/elasticsearch/search/innerhits/InnerHitsIT.java +++ b/core/src/test/java/org/elasticsearch/search/innerhits/InnerHitsIT.java @@ -28,7 +28,7 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.query.BoolQueryBuilder; -import org.elasticsearch.index.query.support.QueryInnerHitBuilder; +import org.elasticsearch.index.query.support.QueryInnerHits; import org.elasticsearch.script.Script; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; @@ -88,9 +88,9 @@ public class InnerHitsIT extends ESIntegTestCase { // Inner hits can be 
defined in two ways: 1) with the query 2) as seperate inner_hit definition SearchRequest[] searchRequests = new SearchRequest[]{ - client().prepareSearch("articles").setQuery(nestedQuery("comments", matchQuery("comments.message", "fox")).innerHit(new QueryInnerHitBuilder().setName("comment"))).request(), + client().prepareSearch("articles").setQuery(nestedQuery("comments", matchQuery("comments.message", "fox")).innerHit(new QueryInnerHits("comment", null))).request(), client().prepareSearch("articles").setQuery(nestedQuery("comments", matchQuery("comments.message", "fox"))) - .addInnerHit("comment", new InnerHitsBuilder.InnerHit().setPath("comments").setQuery(matchQuery("comments.message", "fox"))).request() + .addNestedInnerHits("comment", "comments", new InnerHitsBuilder.InnerHit().setQuery(matchQuery("comments.message", "fox"))).request() }; for (SearchRequest searchRequest : searchRequests) { SearchResponse response = client().search(searchRequest).actionGet(); @@ -112,11 +112,11 @@ public class InnerHitsIT extends ESIntegTestCase { searchRequests = new SearchRequest[] { client().prepareSearch("articles") .setQuery(nestedQuery("comments", matchQuery("comments.message", "elephant"))) - .addInnerHit("comment", new InnerHitsBuilder.InnerHit().setPath("comments").setQuery(matchQuery("comments.message", "elephant"))).request(), + .addNestedInnerHits("comment", "comments", new InnerHitsBuilder.InnerHit().setQuery(matchQuery("comments.message", "elephant"))).request(), client().prepareSearch("articles") - .setQuery(nestedQuery("comments", matchQuery("comments.message", "elephant")).innerHit(new QueryInnerHitBuilder().setName("comment"))).request(), + .setQuery(nestedQuery("comments", matchQuery("comments.message", "elephant")).innerHit(new QueryInnerHits("comment", null))).request(), client().prepareSearch("articles") - .setQuery(nestedQuery("comments", matchQuery("comments.message", "elephant")).innerHit(new QueryInnerHitBuilder().setName("comment").addSort("_doc", 
SortOrder.DESC))).request() + .setQuery(nestedQuery("comments", matchQuery("comments.message", "elephant")).innerHit(new QueryInnerHits("comment", new InnerHitsBuilder.InnerHit().addSort("_doc", SortOrder.DESC)))).request() }; for (SearchRequest searchRequest : searchRequests) { SearchResponse response = client().search(searchRequest).actionGet(); @@ -138,24 +138,24 @@ public class InnerHitsIT extends ESIntegTestCase { assertThat(innerHits.getAt(2).getNestedIdentity().getField().string(), equalTo("comments")); assertThat(innerHits.getAt(2).getNestedIdentity().getOffset(), equalTo(2)); } - + InnerHitsBuilder.InnerHit innerHit = new InnerHitsBuilder.InnerHit(); + innerHit.highlightBuilder().field("comments.message"); + innerHit.setExplain(true); + innerHit.addFieldDataField("comments.message"); + innerHit.addScriptField("script", new Script("doc['comments.message'].value")); + innerHit.setSize(1); searchRequests = new SearchRequest[] { client().prepareSearch("articles") .setQuery(nestedQuery("comments", matchQuery("comments.message", "fox"))) - .addInnerHit("comments", new InnerHitsBuilder.InnerHit().setPath("comments") + .addNestedInnerHits("comments", "comments", new InnerHitsBuilder.InnerHit() .setQuery(matchQuery("comments.message", "fox")) .addHighlightedField("comments.message") .setExplain(true) .addFieldDataField("comments.message") - .addScriptField("script", new Script("doc['comments.message'].value")) + .addScriptField("script", new Script("doc['comments.message'].value")) .setSize(1)).request(), client().prepareSearch("articles") - .setQuery(nestedQuery("comments", matchQuery("comments.message", "fox")).innerHit(new QueryInnerHitBuilder() - .addHighlightedField("comments.message") - .setExplain(true) - .addFieldDataField("comments.message") - .addScriptField("script", new Script("doc['comments.message'].value")) - .setSize(1))).request() + .setQuery(nestedQuery("comments", matchQuery("comments.message", "fox")).innerHit(new QueryInnerHits(null, 
innerHit))).request() }; for (SearchRequest searchRequest : searchRequests) { @@ -201,17 +201,17 @@ public class InnerHitsIT extends ESIntegTestCase { searchResponse = client().prepareSearch("idx") .setSize(numDocs) .addSort("_uid", SortOrder.ASC) - .addInnerHit("a", new InnerHitsBuilder.InnerHit().setPath("field1").addSort("_doc", SortOrder.DESC).setSize(size)) // Sort order is DESC, because we reverse the inner objects during indexing! - .addInnerHit("b", new InnerHitsBuilder.InnerHit().setPath("field2").addSort("_doc", SortOrder.DESC).setSize(size)) + .addNestedInnerHits("a", "field1", new InnerHitsBuilder.InnerHit().addSort("_doc", SortOrder.DESC).setSize(size)) // Sort order is DESC, because we reverse the inner objects during indexing! + .addNestedInnerHits("b", "field2", new InnerHitsBuilder.InnerHit().addSort("_doc", SortOrder.DESC).setSize(size)) .get(); } else { BoolQueryBuilder boolQuery = new BoolQueryBuilder(); if (randomBoolean()) { - boolQuery.should(nestedQuery("field1", matchAllQuery()).innerHit(new QueryInnerHitBuilder().setName("a").addSort("_doc", SortOrder.DESC).setSize(size))); - boolQuery.should(nestedQuery("field2", matchAllQuery()).innerHit(new QueryInnerHitBuilder().setName("b").addSort("_doc", SortOrder.DESC).setSize(size))); + boolQuery.should(nestedQuery("field1", matchAllQuery()).innerHit(new QueryInnerHits("a", new InnerHitsBuilder.InnerHit().addSort("_doc", SortOrder.DESC).setSize(size)))); + boolQuery.should(nestedQuery("field2", matchAllQuery()).innerHit(new QueryInnerHits("b", new InnerHitsBuilder.InnerHit().addSort("_doc", SortOrder.DESC).setSize(size)))); } else { - boolQuery.should(constantScoreQuery(nestedQuery("field1", matchAllQuery()).innerHit(new QueryInnerHitBuilder().setName("a").addSort("_doc", SortOrder.DESC).setSize(size)))); - boolQuery.should(constantScoreQuery(nestedQuery("field2", matchAllQuery()).innerHit(new QueryInnerHitBuilder().setName("b").addSort("_doc", SortOrder.DESC).setSize(size)))); + 
boolQuery.should(constantScoreQuery(nestedQuery("field1", matchAllQuery()).innerHit(new QueryInnerHits("a", new InnerHitsBuilder.InnerHit().addSort("_doc", SortOrder.DESC).setSize(size))))); + boolQuery.should(constantScoreQuery(nestedQuery("field2", matchAllQuery()).innerHit(new QueryInnerHits("b", new InnerHitsBuilder.InnerHit().addSort("_doc", SortOrder.DESC).setSize(size))))); } searchResponse = client().prepareSearch("idx") .setQuery(boolQuery) @@ -267,10 +267,10 @@ public class InnerHitsIT extends ESIntegTestCase { SearchRequest[] searchRequests = new SearchRequest[]{ client().prepareSearch("articles") .setQuery(hasChildQuery("comment", matchQuery("message", "fox"))) - .addInnerHit("comment", new InnerHitsBuilder.InnerHit().setType("comment").setQuery(matchQuery("message", "fox"))) + .addParentChildInnerHits("comment", "comment", new InnerHitsBuilder.InnerHit().setQuery(matchQuery("message", "fox"))) .request(), client().prepareSearch("articles") - .setQuery(hasChildQuery("comment", matchQuery("message", "fox")).innerHit(new QueryInnerHitBuilder().setName("comment"))) + .setQuery(hasChildQuery("comment", matchQuery("message", "fox")).innerHit(new QueryInnerHits("comment", null))) .request() }; for (SearchRequest searchRequest : searchRequests) { @@ -293,10 +293,10 @@ public class InnerHitsIT extends ESIntegTestCase { searchRequests = new SearchRequest[] { client().prepareSearch("articles") .setQuery(hasChildQuery("comment", matchQuery("message", "elephant"))) - .addInnerHit("comment", new InnerHitsBuilder.InnerHit().setType("comment").setQuery(matchQuery("message", "elephant"))) + .addParentChildInnerHits("comment", "comment", new InnerHitsBuilder.InnerHit().setQuery(matchQuery("message", "elephant"))) .request(), client().prepareSearch("articles") - .setQuery(hasChildQuery("comment", matchQuery("message", "elephant")).innerHit(new QueryInnerHitBuilder())) + .setQuery(hasChildQuery("comment", matchQuery("message", "elephant")).innerHit(new QueryInnerHits())) 
.request() }; for (SearchRequest searchRequest : searchRequests) { @@ -316,11 +316,16 @@ public class InnerHitsIT extends ESIntegTestCase { assertThat(innerHits.getAt(2).getId(), equalTo("6")); assertThat(innerHits.getAt(2).type(), equalTo("comment")); } - + InnerHitsBuilder.InnerHit innerHit = new InnerHitsBuilder.InnerHit(); + innerHit.highlightBuilder().field("message"); + innerHit.setExplain(true); + innerHit.addFieldDataField("message"); + innerHit.addScriptField("script", new Script("doc['message'].value")); + innerHit.setSize(1); searchRequests = new SearchRequest[] { client().prepareSearch("articles") .setQuery(hasChildQuery("comment", matchQuery("message", "fox"))) - .addInnerHit("comment", new InnerHitsBuilder.InnerHit().setType("comment") + .addParentChildInnerHits("comment", "comment", new InnerHitsBuilder.InnerHit() .setQuery(matchQuery("message", "fox")) .addHighlightedField("message") .setExplain(true) @@ -328,12 +333,11 @@ public class InnerHitsIT extends ESIntegTestCase { .addScriptField("script", new Script("doc['message'].value")) .setSize(1) ).request(), + client().prepareSearch("articles") .setQuery( hasChildQuery("comment", matchQuery("message", "fox")).innerHit( - new QueryInnerHitBuilder().addHighlightedField("message").setExplain(true) - .addFieldDataField("message").addScriptField("script", new Script("doc['message'].value")) - .setSize(1))).request() }; + new QueryInnerHits(null, innerHit))).request() }; for (SearchRequest searchRequest : searchRequests) { SearchResponse response = client().search(searchRequest).actionGet(); @@ -385,17 +389,17 @@ public class InnerHitsIT extends ESIntegTestCase { .setSize(numDocs) .setTypes("parent") .addSort("_uid", SortOrder.ASC) - .addInnerHit("a", new InnerHitsBuilder.InnerHit().setType("child1").addSort("_uid", SortOrder.ASC).setSize(size)) - .addInnerHit("b", new InnerHitsBuilder.InnerHit().setType("child2").addSort("_uid", SortOrder.ASC).setSize(size)) + .addParentChildInnerHits("a", "child1", new 
InnerHitsBuilder.InnerHit().addSort("_uid", SortOrder.ASC).setSize(size)) + .addParentChildInnerHits("b", "child2", new InnerHitsBuilder.InnerHit().addSort("_uid", SortOrder.ASC).setSize(size)) .get(); } else { BoolQueryBuilder boolQuery = new BoolQueryBuilder(); if (randomBoolean()) { - boolQuery.should(hasChildQuery("child1", matchAllQuery()).innerHit(new QueryInnerHitBuilder().setName("a").addSort("_uid", SortOrder.ASC).setSize(size))); - boolQuery.should(hasChildQuery("child2", matchAllQuery()).innerHit(new QueryInnerHitBuilder().setName("b").addSort("_uid", SortOrder.ASC).setSize(size))); + boolQuery.should(hasChildQuery("child1", matchAllQuery()).innerHit(new QueryInnerHits("a", new InnerHitsBuilder.InnerHit().addSort("_uid", SortOrder.ASC).setSize(size)))); + boolQuery.should(hasChildQuery("child2", matchAllQuery()).innerHit(new QueryInnerHits("b", new InnerHitsBuilder.InnerHit().addSort("_uid", SortOrder.ASC).setSize(size)))); } else { - boolQuery.should(constantScoreQuery(hasChildQuery("child1", matchAllQuery()).innerHit(new QueryInnerHitBuilder().setName("a").addSort("_uid", SortOrder.ASC).setSize(size)))); - boolQuery.should(constantScoreQuery(hasChildQuery("child2", matchAllQuery()).innerHit(new QueryInnerHitBuilder().setName("b").addSort("_uid", SortOrder.ASC).setSize(size)))); + boolQuery.should(constantScoreQuery(hasChildQuery("child1", matchAllQuery()).innerHit(new QueryInnerHits("a", new InnerHitsBuilder.InnerHit().addSort("_uid", SortOrder.ASC).setSize(size))))); + boolQuery.should(constantScoreQuery(hasChildQuery("child2", matchAllQuery()).innerHit(new QueryInnerHits("b", new InnerHitsBuilder.InnerHit().addSort("_uid", SortOrder.ASC).setSize(size))))); } searchResponse = client().prepareSearch("idx") .setSize(numDocs) @@ -447,7 +451,7 @@ public class InnerHitsIT extends ESIntegTestCase { ensureGreen("articles"); try { client().prepareSearch("articles") - .addInnerHit("comment", new InnerHitsBuilder.InnerHit()) + 
.addParentChildInnerHits("comment", null, new InnerHitsBuilder.InnerHit()) .get(); } catch (Exception e) { assertThat(e.getMessage(), containsString("Failed to build")); @@ -474,7 +478,7 @@ public class InnerHitsIT extends ESIntegTestCase { .setQuery( boolQuery() .must(matchQuery("body", "fail2ban")) - .must(hasParentQuery("question", matchAllQuery()).innerHit(new QueryInnerHitBuilder())) + .must(hasParentQuery("question", matchAllQuery()).innerHit(new QueryInnerHits())) ).get(); assertNoFailures(response); assertHitCount(response, 2); @@ -513,10 +517,10 @@ public class InnerHitsIT extends ESIntegTestCase { SearchResponse response = client().prepareSearch("articles") .setQuery(hasChildQuery("comment", hasChildQuery("remark", matchQuery("message", "good")))) - .addInnerHit("comment", - new InnerHitsBuilder.InnerHit().setType("comment") + .addParentChildInnerHits("comment", "comment", + new InnerHitsBuilder.InnerHit() .setQuery(hasChildQuery("remark", matchQuery("message", "good"))) - .addInnerHit("remark", new InnerHitsBuilder.InnerHit().setType("remark").setQuery(matchQuery("message", "good"))) + .addParentChildInnerHits("remark", "remark", new InnerHitsBuilder.InnerHit().setQuery(matchQuery("message", "good"))) ) .get(); @@ -537,10 +541,10 @@ public class InnerHitsIT extends ESIntegTestCase { response = client().prepareSearch("articles") .setQuery(hasChildQuery("comment", hasChildQuery("remark", matchQuery("message", "bad")))) - .addInnerHit("comment", - new InnerHitsBuilder.InnerHit().setType("comment") + .addParentChildInnerHits("comment", "comment", + new InnerHitsBuilder.InnerHit() .setQuery(hasChildQuery("remark", matchQuery("message", "bad"))) - .addInnerHit("remark", new InnerHitsBuilder.InnerHit().setType("remark").setQuery(matchQuery("message", "bad"))) + .addParentChildInnerHits("remark", "remark", new InnerHitsBuilder.InnerHit().setQuery(matchQuery("message", "bad"))) ) .get(); @@ -605,10 +609,9 @@ public class InnerHitsIT extends ESIntegTestCase { 
SearchResponse response = client().prepareSearch("articles") .setQuery(nestedQuery("comments", nestedQuery("comments.remarks", matchQuery("comments.remarks.message", "good")))) - .addInnerHit("comment", new InnerHitsBuilder.InnerHit() - .setPath("comments") + .addNestedInnerHits("comment", "comments", new InnerHitsBuilder.InnerHit() .setQuery(nestedQuery("comments.remarks", matchQuery("comments.remarks.message", "good"))) - .addInnerHit("remark", new InnerHitsBuilder.InnerHit().setPath("comments.remarks").setQuery(matchQuery("comments.remarks.message", "good"))) + .addNestedInnerHits("remark", "comments.remarks", new InnerHitsBuilder.InnerHit().setQuery(matchQuery("comments.remarks.message", "good"))) ).get(); assertNoFailures(response); assertHitCount(response, 1); @@ -631,7 +634,7 @@ public class InnerHitsIT extends ESIntegTestCase { // Directly refer to the second level: response = client().prepareSearch("articles") - .setQuery(nestedQuery("comments.remarks", matchQuery("comments.remarks.message", "bad")).innerHit(new QueryInnerHitBuilder())) + .setQuery(nestedQuery("comments.remarks", matchQuery("comments.remarks.message", "bad")).innerHit(new QueryInnerHits())) .get(); assertNoFailures(response); assertHitCount(response, 1); @@ -648,10 +651,9 @@ public class InnerHitsIT extends ESIntegTestCase { response = client().prepareSearch("articles") .setQuery(nestedQuery("comments", nestedQuery("comments.remarks", matchQuery("comments.remarks.message", "bad")))) - .addInnerHit("comment", new InnerHitsBuilder.InnerHit() - .setPath("comments") - .setQuery(nestedQuery("comments.remarks", matchQuery("comments.remarks.message", "bad"))) - .addInnerHit("remark", new InnerHitsBuilder.InnerHit().setPath("comments.remarks").setQuery(matchQuery("comments.remarks.message", "bad")))) + .addNestedInnerHits("comment", "comments", new InnerHitsBuilder.InnerHit() + .setQuery(nestedQuery("comments.remarks", matchQuery("comments.remarks.message", "bad"))) + .addNestedInnerHits("remark", 
"comments.remarks", new InnerHitsBuilder.InnerHit().setQuery(matchQuery("comments.remarks.message", "bad")))) .get(); assertNoFailures(response); assertHitCount(response, 1); @@ -686,7 +688,7 @@ public class InnerHitsIT extends ESIntegTestCase { indexRandom(true, requests); SearchResponse response = client().prepareSearch("articles") - .setQuery(nestedQuery("comments", matchQuery("comments.message", "fox")).innerHit(new QueryInnerHitBuilder())) + .setQuery(nestedQuery("comments", matchQuery("comments.message", "fox")).innerHit(new QueryInnerHits())) .get(); assertNoFailures(response); assertHitCount(response, 1); @@ -724,8 +726,8 @@ public class InnerHitsIT extends ESIntegTestCase { indexRandom(true, requests); SearchResponse response = client().prepareSearch("articles") - .setQuery(nestedQuery("comments", matchQuery("comments.message", "fox")).innerHit(new QueryInnerHitBuilder().field("comments.message"))) - .get(); + .setQuery(nestedQuery("comments", matchQuery("comments.message", "fox")).innerHit(new QueryInnerHits(null, new InnerHitsBuilder.InnerHit().field("comments.message")))) + .get(); assertNoFailures(response); assertHitCount(response, 1); assertThat(response.getHits().getAt(0).id(), equalTo("1")); @@ -761,9 +763,10 @@ public class InnerHitsIT extends ESIntegTestCase { .startObject("comments").field("message", "fox eat quick").endObject() .endObject())); indexRandom(true, requests); - + InnerHitsBuilder.InnerHit builder = new InnerHitsBuilder.InnerHit(); + builder.highlightBuilder().field("comments.message"); SearchResponse response = client().prepareSearch("articles") - .setQuery(nestedQuery("comments", matchQuery("comments.message", "fox")).innerHit(new QueryInnerHitBuilder().addHighlightedField("comments.message"))) + .setQuery(nestedQuery("comments", matchQuery("comments.message", "fox")).innerHit(new QueryInnerHits(null, builder))) .get(); assertNoFailures(response); assertHitCount(response, 1); @@ -782,13 +785,13 @@ public class InnerHitsIT extends 
ESIntegTestCase { .addMapping("article", jsonBuilder().startObject() .startObject("_source").field("excludes", new String[]{"comments"}).endObject() .startObject("properties") - .startObject("comments") - .field("type", "nested") - .startObject("properties") - .startObject("message").field("type", "string").field("store", "yes").endObject() - .endObject() - .endObject() - .endObject() + .startObject("comments") + .field("type", "nested") + .startObject("properties") + .startObject("message").field("type", "string").field("store", "yes").endObject() + .endObject() + .endObject() + .endObject() .endObject() ) ); @@ -799,9 +802,11 @@ public class InnerHitsIT extends ESIntegTestCase { .startObject("comments").field("message", "fox eat quick").endObject() .endObject())); indexRandom(true, requests); - + InnerHitsBuilder.InnerHit builder = new InnerHitsBuilder.InnerHit(); + builder.field("comments.message"); + builder.setFetchSource(true); SearchResponse response = client().prepareSearch("articles") - .setQuery(nestedQuery("comments", matchQuery("comments.message", "fox")).innerHit(new QueryInnerHitBuilder().field("comments.message").setFetchSource(true))) + .setQuery(nestedQuery("comments", matchQuery("comments.message", "fox")).innerHit(new QueryInnerHits(null, builder))) .get(); assertNoFailures(response); assertHitCount(response, 1); @@ -837,10 +842,11 @@ public class InnerHitsIT extends ESIntegTestCase { .startObject("comments").field("message", "fox eat quick").endObject() .endObject())); indexRandom(true, requests); - + InnerHitsBuilder.InnerHit builder = new InnerHitsBuilder.InnerHit(); + builder.highlightBuilder().field("comments.message"); SearchResponse response = client().prepareSearch("articles") - .setQuery(nestedQuery("comments", matchQuery("comments.message", "fox")).innerHit(new QueryInnerHitBuilder().addHighlightedField("comments.message"))) - .get(); + .setQuery(nestedQuery("comments", matchQuery("comments.message", "fox")).innerHit(new 
QueryInnerHits(null, builder))) + .get(); assertNoFailures(response); assertHitCount(response, 1); assertThat(response.getHits().getAt(0).id(), equalTo("1")); @@ -881,7 +887,7 @@ public class InnerHitsIT extends ESIntegTestCase { indexRandom(true, requests); SearchResponse response = client().prepareSearch("articles") - .setQuery(nestedQuery("comments.messages", matchQuery("comments.messages.message", "fox")).innerHit(new QueryInnerHitBuilder())) + .setQuery(nestedQuery("comments.messages", matchQuery("comments.messages.message", "fox")).innerHit(new QueryInnerHits())) .get(); assertNoFailures(response); assertHitCount(response, 1); @@ -893,7 +899,7 @@ public class InnerHitsIT extends ESIntegTestCase { assertThat(response.getHits().getAt(0).getInnerHits().get("comments.messages").getAt(0).getNestedIdentity().getChild(), nullValue()); response = client().prepareSearch("articles") - .setQuery(nestedQuery("comments.messages", matchQuery("comments.messages.message", "bear")).innerHit(new QueryInnerHitBuilder())) + .setQuery(nestedQuery("comments.messages", matchQuery("comments.messages.message", "bear")).innerHit(new QueryInnerHits())) .get(); assertNoFailures(response); assertHitCount(response, 1); @@ -912,7 +918,7 @@ public class InnerHitsIT extends ESIntegTestCase { .endObject())); indexRandom(true, requests); response = client().prepareSearch("articles") - .setQuery(nestedQuery("comments.messages", matchQuery("comments.messages.message", "fox")).innerHit(new QueryInnerHitBuilder())) + .setQuery(nestedQuery("comments.messages", matchQuery("comments.messages.message", "fox")).innerHit(new QueryInnerHits())) .get(); assertNoFailures(response); assertHitCount(response, 1); @@ -928,11 +934,11 @@ public class InnerHitsIT extends ESIntegTestCase { public void testRoyals() throws Exception { assertAcked( prepareCreate("royals") - .addMapping("king") - .addMapping("prince", "_parent", "type=king") - .addMapping("duke", "_parent", "type=prince") - .addMapping("earl", 
"_parent", "type=duke") - .addMapping("baron", "_parent", "type=earl") + .addMapping("king") + .addMapping("prince", "_parent", "type=king") + .addMapping("duke", "_parent", "type=prince") + .addMapping("earl", "_parent", "type=duke") + .addMapping("baron", "_parent", "type=earl") ); List requests = new ArrayList<>(); @@ -951,15 +957,14 @@ public class InnerHitsIT extends ESIntegTestCase { SearchResponse response = client().prepareSearch("royals") .setTypes("duke") - .addInnerHit("earls", new InnerHitsBuilder.InnerHit() - .setType("earl") + .addParentChildInnerHits("earls", "earl", new InnerHitsBuilder.InnerHit() .addSort(SortBuilders.fieldSort("_uid").order(SortOrder.ASC)) .setSize(4) - .addInnerHit("barons", new InnerHitsBuilder.InnerHit().setType("baron")) + .addParentChildInnerHits("barons", "baron", new InnerHitsBuilder.InnerHit()) ) - .addInnerHit("princes", - new InnerHitsBuilder.InnerHit().setType("prince") - .addInnerHit("kings", new InnerHitsBuilder.InnerHit().setType("king")) + .addParentChildInnerHits("princes", "prince", + new InnerHitsBuilder.InnerHit() + .addParentChildInnerHits("kings", "king", new InnerHitsBuilder.InnerHit()) ) .get(); assertHitCount(response, 1); @@ -1067,7 +1072,7 @@ public class InnerHitsIT extends ESIntegTestCase { .should(termQuery("nested1.n_field1", "n_value1_1").queryName("test1")) .should(termQuery("nested1.n_field1", "n_value1_3").queryName("test2")) .should(termQuery("nested1.n_field2", "n_value2_2").queryName("test3")) - ).innerHit(new QueryInnerHitBuilder().addSort("nested1.n_field1", SortOrder.ASC))) + ).innerHit(new QueryInnerHits(null, new InnerHitsBuilder.InnerHit().addSort("nested1.n_field1", SortOrder.ASC)))) .setSize(numDocs) .addSort("field1", SortOrder.ASC) .get(); @@ -1107,7 +1112,7 @@ public class InnerHitsIT extends ESIntegTestCase { indexRandom(true, requests); SearchResponse response = client().prepareSearch("index") - .setQuery(hasChildQuery("child", matchQuery("field", 
"value1").queryName("_name1")).innerHit(new QueryInnerHitBuilder())) + .setQuery(hasChildQuery("child", matchQuery("field", "value1").queryName("_name1")).innerHit(new QueryInnerHits())) .addSort("_uid", SortOrder.ASC) .get(); assertHitCount(response, 2); @@ -1122,7 +1127,7 @@ public class InnerHitsIT extends ESIntegTestCase { assertThat(response.getHits().getAt(1).getInnerHits().get("child").getAt(0).getMatchedQueries()[0], equalTo("_name1")); response = client().prepareSearch("index") - .setQuery(hasChildQuery("child", matchQuery("field", "value2").queryName("_name2")).innerHit(new QueryInnerHitBuilder())) + .setQuery(hasChildQuery("child", matchQuery("field", "value2").queryName("_name2")).innerHit(new QueryInnerHits())) .addSort("_id", SortOrder.ASC) .get(); assertHitCount(response, 1); @@ -1141,7 +1146,7 @@ public class InnerHitsIT extends ESIntegTestCase { indexRandom(true, requests); SearchResponse response = client().prepareSearch("index1") - .setQuery(hasChildQuery("child", matchQuery("field", "value1")).innerHit(new QueryInnerHitBuilder().setSize(ArrayUtil.MAX_ARRAY_LENGTH - 1))) + .setQuery(hasChildQuery("child", matchQuery("field", "value1")).innerHit(new QueryInnerHits(null, new InnerHitsBuilder.InnerHit().setSize(ArrayUtil.MAX_ARRAY_LENGTH - 1)))) .addSort("_uid", SortOrder.ASC) .get(); assertNoFailures(response); @@ -1159,7 +1164,7 @@ public class InnerHitsIT extends ESIntegTestCase { .get(); response = client().prepareSearch("index2") - .setQuery(nestedQuery("nested", matchQuery("nested.field", "value1")).innerHit(new QueryInnerHitBuilder().setSize(ArrayUtil.MAX_ARRAY_LENGTH - 1))) + .setQuery(nestedQuery("nested", matchQuery("nested.field", "value1")).innerHit(new QueryInnerHits(null, new InnerHitsBuilder.InnerHit().setSize(ArrayUtil.MAX_ARRAY_LENGTH - 1)))) .addSort("_uid", SortOrder.ASC) .get(); assertNoFailures(response); diff --git a/core/src/test/java/org/elasticsearch/search/morelikethis/ItemSerializationTests.java 
b/core/src/test/java/org/elasticsearch/search/morelikethis/ItemSerializationTests.java deleted file mode 100644 index 5f5f42aa7b2..00000000000 --- a/core/src/test/java/org/elasticsearch/search/morelikethis/ItemSerializationTests.java +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.search.morelikethis; - -import com.carrotsearch.randomizedtesting.generators.RandomPicks; -import org.elasticsearch.common.ParseFieldMatcher; -import org.elasticsearch.common.xcontent.ToXContent; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.VersionType; -import org.elasticsearch.index.query.MoreLikeThisQueryBuilder.Item; -import org.elasticsearch.test.ESTestCase; -import org.junit.Test; - -import java.util.Random; - -public class ItemSerializationTests extends ESTestCase { - - private Item generateRandomItem(int arraySize, int stringSize) { - String index = randomAsciiOfLength(stringSize); - String type = randomAsciiOfLength(stringSize); - String id = String.valueOf(Math.abs(randomInt())); - String[] fields = generateRandomStringArray(arraySize, stringSize, true); - String routing = randomBoolean() ? randomAsciiOfLength(stringSize) : null; - long version = Math.abs(randomLong()); - VersionType versionType = RandomPicks.randomFrom(new Random(), VersionType.values()); - return new Item(index, type, id).fields(fields).routing(routing).version(version).versionType(versionType); - } - - @Test - public void testItemSerialization() throws Exception { - int numOfTrials = 100; - int maxArraySize = 7; - int maxStringSize = 8; - for (int i = 0; i < numOfTrials; i++) { - Item item1 = generateRandomItem(maxArraySize, maxStringSize); - String json = item1.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS).string(); - XContentParser parser = XContentFactory.xContent(json).createParser(json); - Item item2 = Item.parse(parser, ParseFieldMatcher.STRICT, new Item()); - assertEquals(item1, item2); - } - } -} diff --git a/core/src/test/java/org/elasticsearch/search/morelikethis/MoreLikeThisIT.java b/core/src/test/java/org/elasticsearch/search/morelikethis/MoreLikeThisIT.java index bbc992f75ae..0e7001555fe 100644 --- 
a/core/src/test/java/org/elasticsearch/search/morelikethis/MoreLikeThisIT.java +++ b/core/src/test/java/org/elasticsearch/search/morelikethis/MoreLikeThisIT.java @@ -39,6 +39,7 @@ import java.util.ArrayList; import java.util.List; import java.util.concurrent.ExecutionException; +import static org.elasticsearch.index.query.MoreLikeThisQueryBuilder.ids; import static org.elasticsearch.client.Requests.*; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; @@ -72,7 +73,7 @@ public class MoreLikeThisIT extends ESIntegTestCase { logger.info("Running moreLikeThis"); SearchResponse response = client().prepareSearch().setQuery( - new MoreLikeThisQueryBuilder().addLikeItem(new Item("test", "type1", "1")).minTermFreq(1).minDocFreq(1)).get(); + new MoreLikeThisQueryBuilder(null, new Item[] {new Item("test", "type1", "1")}).minTermFreq(1).minDocFreq(1)).get(); assertHitCount(response, 1l); } @@ -92,7 +93,7 @@ public class MoreLikeThisIT extends ESIntegTestCase { logger.info("Running moreLikeThis"); SearchResponse response = client().prepareSearch().setQuery( - new MoreLikeThisQueryBuilder().addLikeItem(new Item("test", "type1", "1")).minTermFreq(1).minDocFreq(1)).get(); + new MoreLikeThisQueryBuilder(null, new Item[] {new Item("test", "type1", "1")}).minTermFreq(1).minDocFreq(1)).get(); assertHitCount(response, 0l); } @@ -119,24 +120,24 @@ public class MoreLikeThisIT extends ESIntegTestCase { logger.info("Running moreLikeThis on index"); SearchResponse response = client().prepareSearch().setQuery( - new MoreLikeThisQueryBuilder().addLikeItem(new Item("test", "type1", "1")).minTermFreq(1).minDocFreq(1)).get(); + new MoreLikeThisQueryBuilder(null, new Item[] {new Item("test", "type1", "1")}).minTermFreq(1).minDocFreq(1)).get(); assertHitCount(response, 2l); logger.info("Running moreLikeThis on beta shard"); response = 
client().prepareSearch("beta").setQuery( - new MoreLikeThisQueryBuilder().addLikeItem(new Item("test", "type1", "1")).minTermFreq(1).minDocFreq(1)).get(); + new MoreLikeThisQueryBuilder(null, new Item[] {new Item("test", "type1", "1")}).minTermFreq(1).minDocFreq(1)).get(); assertHitCount(response, 1l); assertThat(response.getHits().getAt(0).id(), equalTo("3")); logger.info("Running moreLikeThis on release shard"); response = client().prepareSearch("release").setQuery( - new MoreLikeThisQueryBuilder().addLikeItem(new Item("test", "type1", "1")).minTermFreq(1).minDocFreq(1)).get(); + new MoreLikeThisQueryBuilder(null, new Item[] {new Item("test", "type1", "1")}).minTermFreq(1).minDocFreq(1)).get(); assertHitCount(response, 1l); assertThat(response.getHits().getAt(0).id(), equalTo("2")); logger.info("Running moreLikeThis on alias with node client"); response = internalCluster().clientNodeClient().prepareSearch("beta").setQuery( - new MoreLikeThisQueryBuilder().addLikeItem(new Item("test", "type1", "1")).minTermFreq(1).minDocFreq(1)).get(); + new MoreLikeThisQueryBuilder(null, new Item[] {new Item("test", "type1", "1")}).minTermFreq(1).minDocFreq(1)).get(); assertHitCount(response, 1l); assertThat(response.getHits().getAt(0).id(), equalTo("3")); } @@ -156,11 +157,11 @@ public class MoreLikeThisIT extends ESIntegTestCase { assertThat(ensureGreen(), equalTo(ClusterHealthStatus.GREEN)); SearchResponse response = client().prepareSearch().setQuery( - new MoreLikeThisQueryBuilder().addLikeItem(new Item("foo", "bar", "1"))).get(); + new MoreLikeThisQueryBuilder(null, new Item[] {new Item("foo", "bar", "1")})).get(); assertNoFailures(response); assertThat(response, notNullValue()); response = client().prepareSearch().setQuery( - new MoreLikeThisQueryBuilder().addLikeItem(new Item("foo", "bar", "1"))).get(); + new MoreLikeThisQueryBuilder(null, new Item[] {new Item("foo", "bar", "1")})).get(); assertNoFailures(response); assertThat(response, notNullValue()); } @@ -182,7 +183,7 
@@ public class MoreLikeThisIT extends ESIntegTestCase { client().admin().indices().prepareRefresh("foo").execute().actionGet(); SearchResponse response = client().prepareSearch().setQuery( - new MoreLikeThisQueryBuilder().addLikeItem(new Item("foo", "bar", "1").routing("2"))).get(); + new MoreLikeThisQueryBuilder(null, new Item[] {new Item("foo", "bar", "1").routing("2")})).get(); assertNoFailures(response); assertThat(response, notNullValue()); } @@ -205,7 +206,7 @@ public class MoreLikeThisIT extends ESIntegTestCase { .execute().actionGet(); client().admin().indices().prepareRefresh("foo").execute().actionGet(); SearchResponse response = client().prepareSearch().setQuery( - new MoreLikeThisQueryBuilder().addLikeItem(new Item("foo", "bar", "1").routing("4000"))).get(); + new MoreLikeThisQueryBuilder(null, new Item[] {new Item("foo", "bar", "1").routing("4000")})).get(); assertNoFailures(response); assertThat(response, notNullValue()); } @@ -233,41 +234,41 @@ public class MoreLikeThisIT extends ESIntegTestCase { // Implicit list of fields -> ignore numeric fields SearchResponse searchResponse = client().prepareSearch().setQuery( - new MoreLikeThisQueryBuilder().addLikeItem(new Item("test", "type", "1")).minTermFreq(1).minDocFreq(1)).get(); + new MoreLikeThisQueryBuilder(null, new Item[] {new Item("test", "type", "1")}).minTermFreq(1).minDocFreq(1)).get(); assertHitCount(searchResponse, 1l); // Explicit list of fields including numeric fields -> fail assertThrows(client().prepareSearch().setQuery( - new MoreLikeThisQueryBuilder("string_value", "int_value").addLikeItem(new Item("test", "type", "1")).minTermFreq(1).minDocFreq(1)), SearchPhaseExecutionException.class); + new MoreLikeThisQueryBuilder(new String[] {"string_value", "int_value"}, null, new Item[] {new Item("test", "type", "1")}).minTermFreq(1).minDocFreq(1)), SearchPhaseExecutionException.class); // mlt query with no field -> OK - searchResponse = 
client().prepareSearch().setQuery(moreLikeThisQuery().likeText("index").minTermFreq(1).minDocFreq(1)).execute().actionGet(); + searchResponse = client().prepareSearch().setQuery(moreLikeThisQuery(new String[] {"index"}).minTermFreq(1).minDocFreq(1)).execute().actionGet(); assertHitCount(searchResponse, 2l); // mlt query with string fields - searchResponse = client().prepareSearch().setQuery(moreLikeThisQuery("string_value").likeText("index").minTermFreq(1).minDocFreq(1)).execute().actionGet(); + searchResponse = client().prepareSearch().setQuery(moreLikeThisQuery(new String[]{"string_value"}, new String[] {"index"}, null).minTermFreq(1).minDocFreq(1)).execute().actionGet(); assertHitCount(searchResponse, 2l); // mlt query with at least a numeric field -> fail by default - assertThrows(client().prepareSearch().setQuery(moreLikeThisQuery("string_value", "int_value").likeText("index")), SearchPhaseExecutionException.class); + assertThrows(client().prepareSearch().setQuery(moreLikeThisQuery(new String[] {"string_value", "int_value"}, new String[] {"index"}, null)), SearchPhaseExecutionException.class); // mlt query with at least a numeric field -> fail by command - assertThrows(client().prepareSearch().setQuery(moreLikeThisQuery("string_value", "int_value").likeText("index").failOnUnsupportedField(true)), SearchPhaseExecutionException.class); + assertThrows(client().prepareSearch().setQuery(moreLikeThisQuery(new String[] {"string_value", "int_value"}, new String[] {"index"}, null).failOnUnsupportedField(true)), SearchPhaseExecutionException.class); // mlt query with at least a numeric field but fail_on_unsupported_field set to false - searchResponse = client().prepareSearch().setQuery(moreLikeThisQuery("string_value", "int_value").likeText("index").minTermFreq(1).minDocFreq(1).failOnUnsupportedField(false)).get(); + searchResponse = client().prepareSearch().setQuery(moreLikeThisQuery(new String[] {"string_value", "int_value"}, new String[] {"index"}, 
null).minTermFreq(1).minDocFreq(1).failOnUnsupportedField(false)).get(); assertHitCount(searchResponse, 2l); // mlt field query on a numeric field -> failure by default - assertThrows(client().prepareSearch().setQuery(moreLikeThisQuery("int_value").likeText("42").minTermFreq(1).minDocFreq(1)), SearchPhaseExecutionException.class); + assertThrows(client().prepareSearch().setQuery(moreLikeThisQuery(new String[] {"int_value"}, new String[] {"42"}, null).minTermFreq(1).minDocFreq(1)), SearchPhaseExecutionException.class); // mlt field query on a numeric field -> failure by command - assertThrows(client().prepareSearch().setQuery(moreLikeThisQuery("int_value").likeText("42").minTermFreq(1).minDocFreq(1).failOnUnsupportedField(true)), + assertThrows(client().prepareSearch().setQuery(moreLikeThisQuery(new String[] {"int_value"}, new String[] {"42"}, null).minTermFreq(1).minDocFreq(1).failOnUnsupportedField(true)), SearchPhaseExecutionException.class); // mlt field query on a numeric field but fail_on_unsupported_field set to false - searchResponse = client().prepareSearch().setQuery(moreLikeThisQuery("int_value").likeText("42").minTermFreq(1).minDocFreq(1).failOnUnsupportedField(false)).execute().actionGet(); + searchResponse = client().prepareSearch().setQuery(moreLikeThisQuery(new String[] {"int_value"}, new String[] {"42"}, null).minTermFreq(1).minDocFreq(1).failOnUnsupportedField(false)).execute().actionGet(); assertHitCount(searchResponse, 0l); } @@ -295,16 +296,16 @@ public class MoreLikeThisIT extends ESIntegTestCase { logger.info("Running More Like This with include true"); SearchResponse response = client().prepareSearch().setQuery( - new MoreLikeThisQueryBuilder().addLikeItem(new Item("test", "type1", "1")).minTermFreq(1).minDocFreq(1).include(true).minimumShouldMatch("0%")).get(); + new MoreLikeThisQueryBuilder(null, new Item[] {new Item("test", "type1", "1")}).minTermFreq(1).minDocFreq(1).include(true).minimumShouldMatch("0%")).get(); 
assertOrderedSearchHits(response, "1", "2"); response = client().prepareSearch().setQuery( - new MoreLikeThisQueryBuilder().addLikeItem(new Item("test", "type1", "2")).minTermFreq(1).minDocFreq(1).include(true).minimumShouldMatch("0%")).get(); + new MoreLikeThisQueryBuilder(null, new Item[] {new Item("test", "type1", "2")}).minTermFreq(1).minDocFreq(1).include(true).minimumShouldMatch("0%")).get(); assertOrderedSearchHits(response, "2", "1"); logger.info("Running More Like This with include false"); response = client().prepareSearch().setQuery( - new MoreLikeThisQueryBuilder().addLikeItem(new Item("test", "type1", "1")).minTermFreq(1).minDocFreq(1).minimumShouldMatch("0%")).get(); + new MoreLikeThisQueryBuilder(null, new Item[] {new Item("test", "type1", "1")}).minTermFreq(1).minDocFreq(1).minimumShouldMatch("0%")).get(); assertSearchHits(response, "2"); } @@ -326,7 +327,7 @@ public class MoreLikeThisIT extends ESIntegTestCase { indexRandom(true, builders); logger.info("Running MoreLikeThis"); - MoreLikeThisQueryBuilder queryBuilder = QueryBuilders.moreLikeThisQuery("text").ids("1").include(true).minTermFreq(1).minDocFreq(1); + MoreLikeThisQueryBuilder queryBuilder = QueryBuilders.moreLikeThisQuery(new String[] {"text"}, null, ids("1")).include(true).minTermFreq(1).minDocFreq(1); SearchResponse mltResponse = client().prepareSearch().setTypes("type1").setQuery(queryBuilder).execute().actionGet(); assertHitCount(mltResponse, 3l); } @@ -354,8 +355,7 @@ public class MoreLikeThisIT extends ESIntegTestCase { indexRandom(true, builders); logger.info("Running MoreLikeThis"); - MoreLikeThisQueryBuilder queryBuilder = QueryBuilders.moreLikeThisQuery("text").include(true).minTermFreq(1).minDocFreq(1) - .addLikeItem(new Item("test", "type0", "0")); + MoreLikeThisQueryBuilder queryBuilder = QueryBuilders.moreLikeThisQuery(new String[] {"text"}, null, new Item[] {new Item("test", "type0", "0")}).include(true).minTermFreq(1).minDocFreq(1); String[] types = new String[numOfTypes]; 
for (int i = 0; i < numOfTypes; i++) { @@ -388,7 +388,8 @@ public class MoreLikeThisIT extends ESIntegTestCase { for (int i = 0; i < maxIters; i++) { int max_query_terms = randomIntBetween(1, values.length); logger.info("Running More Like This with max_query_terms = %s", max_query_terms); - MoreLikeThisQueryBuilder mltQuery = moreLikeThisQuery("text").ids("0").minTermFreq(1).minDocFreq(1) + MoreLikeThisQueryBuilder mltQuery = moreLikeThisQuery(new String[] {"text"}, null, new Item[] {new Item(null, null, "0")}) + .minTermFreq(1).minDocFreq(1) .maxQueryTerms(max_query_terms).minimumShouldMatch("0%"); SearchResponse response = client().prepareSearch("test").setTypes("type1") .setQuery(mltQuery).execute().actionGet(); @@ -419,8 +420,7 @@ public class MoreLikeThisIT extends ESIntegTestCase { logger.info("Testing each minimum_should_match from 0% - 100% with 10% increment ..."); for (int i = 0; i <= 10; i++) { String minimumShouldMatch = (10 * i) + "%"; - MoreLikeThisQueryBuilder mltQuery = moreLikeThisQuery("text") - .likeText("1 2 3 4 5 6 7 8 9 10") + MoreLikeThisQueryBuilder mltQuery = moreLikeThisQuery(new String[] {"text"}, new String[] {"1 2 3 4 5 6 7 8 9 10"}, null) .minTermFreq(1) .minDocFreq(1) .minimumShouldMatch(minimumShouldMatch); @@ -452,8 +452,7 @@ public class MoreLikeThisIT extends ESIntegTestCase { indexRandom(true, client().prepareIndex("test", "type1", "0").setSource(doc)); logger.info("Checking the document matches ..."); - MoreLikeThisQueryBuilder mltQuery = moreLikeThisQuery() - .like(new Item("test", "type1", doc).routing("0")) // routing to ensure we hit the shard with the doc + MoreLikeThisQueryBuilder mltQuery = moreLikeThisQuery(new Item[] {new Item("test", "type1", doc).routing("0")}) // routing to ensure we hit the shard with the doc .minTermFreq(0) .minDocFreq(0) .maxQueryTerms(100) @@ -484,8 +483,7 @@ public class MoreLikeThisIT extends ESIntegTestCase { .field("text", "Hello World!") .field("date", "this is not a date!") .endObject(); - 
MoreLikeThisQueryBuilder mltQuery = moreLikeThisQuery() - .like(new Item("test", "type1", malformedFieldDoc)) + MoreLikeThisQueryBuilder mltQuery = moreLikeThisQuery(new Item[] {new Item("test", "type1", malformedFieldDoc)}) .minTermFreq(0) .minDocFreq(0) .minimumShouldMatch("0%"); @@ -496,8 +494,7 @@ public class MoreLikeThisIT extends ESIntegTestCase { logger.info("Checking with an empty document ..."); XContentBuilder emptyDoc = jsonBuilder().startObject().endObject(); - mltQuery = moreLikeThisQuery() - .like(new Item("test", "type1", emptyDoc)) + mltQuery = moreLikeThisQuery(null, new Item[] {new Item("test", "type1", emptyDoc)}) .minTermFreq(0) .minDocFreq(0) .minimumShouldMatch("0%"); @@ -508,8 +505,7 @@ public class MoreLikeThisIT extends ESIntegTestCase { logger.info("Checking when document is malformed ..."); XContentBuilder malformedDoc = jsonBuilder().startObject(); - mltQuery = moreLikeThisQuery() - .like(new Item("test", "type1", malformedDoc)) + mltQuery = moreLikeThisQuery(null, new Item[] {new Item("test", "type1", malformedDoc)}) .minTermFreq(0) .minDocFreq(0) .minimumShouldMatch("0%"); @@ -524,8 +520,7 @@ public class MoreLikeThisIT extends ESIntegTestCase { .field("text", "Hello World!") .field("date", "1000-01-01") // should be properly parsed but ignored ... 
.endObject(); - mltQuery = moreLikeThisQuery() - .like(new Item("test", "type1", normalDoc)) + mltQuery = moreLikeThisQuery(null, new Item[] {new Item("test", "type1", normalDoc)}) .minTermFreq(0) .minDocFreq(0) .minimumShouldMatch("100%"); // strict all terms must match but date is ignored @@ -556,8 +551,7 @@ public class MoreLikeThisIT extends ESIntegTestCase { indexRandom(true, builders); logger.info("First check the document matches all indexed docs."); - MoreLikeThisQueryBuilder mltQuery = moreLikeThisQuery() - .like(new Item("test", "type1", doc)) + MoreLikeThisQueryBuilder mltQuery = moreLikeThisQuery(new Item[] {new Item("test", "type1", doc)}) .minTermFreq(0) .minDocFreq(0) .maxQueryTerms(100) @@ -568,12 +562,11 @@ public class MoreLikeThisIT extends ESIntegTestCase { assertHitCount(response, numFields); logger.info("Now check like this doc, but ignore one doc in the index, then two and so on..."); - List docs = new ArrayList<>(); + List docs = new ArrayList<>(numFields); for (int i = 0; i < numFields; i++) { docs.add(new Item("test", "type1", i+"")); - mltQuery = moreLikeThisQuery() - .like(new Item("test", "type1", doc)) - .ignoreLike(docs.toArray(Item.EMPTY_ARRAY)) + mltQuery = moreLikeThisQuery(null, new Item[] {new Item("test", "type1", doc)}) + .unlike(docs.toArray(new Item[docs.size()])) .minTermFreq(0) .minDocFreq(0) .maxQueryTerms(100) @@ -602,8 +595,7 @@ public class MoreLikeThisIT extends ESIntegTestCase { .field("text1", "elasticsearch") .endObject())); - MoreLikeThisQueryBuilder mltQuery = moreLikeThisQuery() - .like(new Item("test", "type1", "1")) + MoreLikeThisQueryBuilder mltQuery = moreLikeThisQuery(new Item[] {new Item("test", "type1", "1")}) .minTermFreq(0) .minDocFreq(0) .include(true) @@ -613,8 +605,7 @@ public class MoreLikeThisIT extends ESIntegTestCase { assertSearchResponse(response); assertHitCount(response, 2); - mltQuery = moreLikeThisQuery("text") - .like(new Item("test", "type1", "1")) + mltQuery = moreLikeThisQuery(new 
String[] {"text"}, null, new Item[] {new Item("test", "type1", "1")}) .minTermFreq(0) .minDocFreq(0) .include(true) diff --git a/core/src/test/java/org/elasticsearch/search/query/ExistsMissingIT.java b/core/src/test/java/org/elasticsearch/search/query/ExistsMissingIT.java index c232a7c4a13..4d85f8bbabd 100644 --- a/core/src/test/java/org/elasticsearch/search/query/ExistsMissingIT.java +++ b/core/src/test/java/org/elasticsearch/search/query/ExistsMissingIT.java @@ -23,7 +23,6 @@ import com.google.common.collect.ImmutableMap; import org.elasticsearch.action.explain.ExplainResponse; import org.elasticsearch.action.index.IndexRequestBuilder; -import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.json.JsonXContent; @@ -158,19 +157,19 @@ public class ExistsMissingIT extends ESIntegTestCase { client().prepareIndex("idx", "type", "3").setSource("g", "bar"), client().prepareIndex("idx", "type", "4").setSource("f", "bar")); - SearchResponse resp = client().prepareSearch("idx").setQuery(QueryBuilders.missingQuery("f").existence(true).nullValue(true)).get(); + SearchResponse resp = client().prepareSearch("idx").setQuery(QueryBuilders.missingQuery("f", true, true)).get(); assertSearchHits(resp, "2", "3"); - resp = client().prepareSearch("idx").setQuery(QueryBuilders.missingQuery("f").existence(true).nullValue(false)).get(); + resp = client().prepareSearch("idx").setQuery(QueryBuilders.missingQuery("f", false, true)).get(); assertSearchHits(resp, "2", "3"); - resp = client().prepareSearch("idx").setQuery(QueryBuilders.missingQuery("f").existence(false).nullValue(true)).get(); + resp = client().prepareSearch("idx").setQuery(QueryBuilders.missingQuery("f", true, false)).get(); assertSearchHits(resp); try { - 
client().prepareSearch("idx").setQuery(QueryBuilders.missingQuery("f").existence(false).nullValue(false)).get(); + client().prepareSearch("idx").setQuery(QueryBuilders.missingQuery("f", false, false)).get(); fail("both existence and null_value can't be false"); - } catch (SearchPhaseExecutionException e) { + } catch (IllegalArgumentException e) { // expected } } @@ -183,19 +182,19 @@ public class ExistsMissingIT extends ESIntegTestCase { client().prepareIndex("idx", "type", "3").setSource("g", "bar"), client().prepareIndex("idx", "type", "4").setSource("f", "bar")); - SearchResponse resp = client().prepareSearch("idx").setQuery(QueryBuilders.missingQuery("f").existence(true).nullValue(true)).get(); + SearchResponse resp = client().prepareSearch("idx").setQuery(QueryBuilders.missingQuery("f", true, true)).get(); assertSearchHits(resp, "2", "3", "4"); - resp = client().prepareSearch("idx").setQuery(QueryBuilders.missingQuery("f").existence(true).nullValue(false)).get(); + resp = client().prepareSearch("idx").setQuery(QueryBuilders.missingQuery("f", false, true)).get(); assertSearchHits(resp, "3"); - resp = client().prepareSearch("idx").setQuery(QueryBuilders.missingQuery("f").existence(false).nullValue(true)).get(); + resp = client().prepareSearch("idx").setQuery(QueryBuilders.missingQuery("f", true, false)).get(); assertSearchHits(resp, "2", "4"); try { - client().prepareSearch("idx").setQuery(QueryBuilders.missingQuery("f").existence(false).nullValue(false)).get(); + client().prepareSearch("idx").setQuery(QueryBuilders.missingQuery("f", false, false)).get(); fail("both existence and null_value can't be false"); - } catch (SearchPhaseExecutionException e) { + } catch (IllegalArgumentException e) { // expected } } diff --git a/core/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java b/core/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java index 24eb8cb7071..3110d78820c 100644 --- 
a/core/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java +++ b/core/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java @@ -19,15 +19,17 @@ package org.elasticsearch.search.query; import com.carrotsearch.randomizedtesting.generators.RandomPicks; +import org.apache.lucene.index.Term; +import org.apache.lucene.search.*; import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.index.query.MatchQueryBuilder; -import org.elasticsearch.index.query.MultiMatchQueryBuilder; -import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.index.engine.Engine; +import org.elasticsearch.index.query.*; +import org.elasticsearch.index.search.MatchQuery; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.sort.SortBuilders; @@ -39,6 +41,7 @@ import org.junit.Test; import java.io.IOException; import java.lang.reflect.Field; import java.util.ArrayList; +import java.util.Arrays; import java.util.List; import java.util.Set; import java.util.concurrent.ExecutionException; @@ -169,10 +172,10 @@ public class MultiMatchQueryIT extends ESIntegTestCase { @Test public void testDefaults() throws ExecutionException, InterruptedException { - MatchQueryBuilder.Type type = randomBoolean() ? null : MatchQueryBuilder.Type.BOOLEAN; + MatchQuery.Type type = randomBoolean() ? 
MatchQueryBuilder.DEFAULT_TYPE : MatchQuery.Type.BOOLEAN; SearchResponse searchResponse = client().prepareSearch("test") .setQuery(randomizeType(multiMatchQuery("marvel hero captain america", "full_name", "first_name", "last_name", "category") - .operator(MatchQueryBuilder.Operator.OR))).get(); + .operator(Operator.OR))).get(); Set topNIds = Sets.newHashSet("theone", "theother"); for (int i = 0; i < searchResponse.getHits().hits().length; i++) { topNIds.remove(searchResponse.getHits().getAt(i).getId()); @@ -184,25 +187,25 @@ public class MultiMatchQueryIT extends ESIntegTestCase { searchResponse = client().prepareSearch("test") .setQuery(randomizeType(multiMatchQuery("marvel hero captain america", "full_name", "first_name", "last_name", "category") - .operator(MatchQueryBuilder.Operator.OR).useDisMax(false).type(type))).get(); + .operator(Operator.OR).useDisMax(false).type(type))).get(); assertFirstHit(searchResponse, anyOf(hasId("theone"), hasId("theother"))); assertThat(searchResponse.getHits().hits()[0].getScore(), greaterThan(searchResponse.getHits().hits()[1].getScore())); searchResponse = client().prepareSearch("test") .setQuery(randomizeType(multiMatchQuery("marvel hero", "full_name", "first_name", "last_name", "category") - .operator(MatchQueryBuilder.Operator.OR).type(type))).get(); + .operator(Operator.OR).type(type))).get(); assertFirstHit(searchResponse, hasId("theother")); searchResponse = client().prepareSearch("test") .setQuery(randomizeType(multiMatchQuery("captain america", "full_name", "first_name", "last_name", "category") - .operator(MatchQueryBuilder.Operator.AND).type(type))).get(); + .operator(Operator.AND).type(type))).get(); assertHitCount(searchResponse, 1l); assertFirstHit(searchResponse, hasId("theone")); searchResponse = client().prepareSearch("test") .setQuery(randomizeType(multiMatchQuery("captain america", "full_name", "first_name", "last_name", "category") - .operator(MatchQueryBuilder.Operator.AND).type(type))).get(); + 
.operator(Operator.AND).type(type))).get(); assertHitCount(searchResponse, 1l); assertFirstHit(searchResponse, hasId("theone")); } @@ -211,18 +214,18 @@ public class MultiMatchQueryIT extends ESIntegTestCase { public void testPhraseType() { SearchResponse searchResponse = client().prepareSearch("test") .setQuery(randomizeType(multiMatchQuery("Man the Ultimate", "full_name_phrase", "first_name_phrase", "last_name_phrase", "category_phrase") - .operator(MatchQueryBuilder.Operator.OR).type(MatchQueryBuilder.Type.PHRASE))).get(); + .operator(Operator.OR).type(MatchQuery.Type.PHRASE))).get(); assertFirstHit(searchResponse, hasId("ultimate2")); assertHitCount(searchResponse, 1l); searchResponse = client().prepareSearch("test") .setQuery(randomizeType(multiMatchQuery("Captain", "full_name_phrase", "first_name_phrase", "last_name_phrase", "category_phrase") - .operator(MatchQueryBuilder.Operator.OR).type(MatchQueryBuilder.Type.PHRASE))).get(); + .operator(Operator.OR).type(MatchQuery.Type.PHRASE))).get(); assertThat(searchResponse.getHits().getTotalHits(), greaterThan(1l)); searchResponse = client().prepareSearch("test") .setQuery(randomizeType(multiMatchQuery("the Ul", "full_name_phrase", "first_name_phrase", "last_name_phrase", "category_phrase") - .operator(MatchQueryBuilder.Operator.OR).type(MatchQueryBuilder.Type.PHRASE_PREFIX))).get(); + .operator(Operator.OR).type(MatchQuery.Type.PHRASE_PREFIX))).get(); assertSearchHits(searchResponse, "ultimate2", "ultimate1"); assertHitCount(searchResponse, 2l); } @@ -256,7 +259,7 @@ public class MultiMatchQueryIT extends ESIntegTestCase { .setQuery(multiMatchQueryBuilder).get(); MatchQueryBuilder matchQueryBuilder = QueryBuilders.matchQuery(field, builder.toString()); if (getType(multiMatchQueryBuilder) != null) { - matchQueryBuilder.type(MatchQueryBuilder.Type.valueOf(getType(multiMatchQueryBuilder).matchQueryType().toString())); + 
matchQueryBuilder.type(MatchQuery.Type.valueOf(getType(multiMatchQueryBuilder).matchQueryType().toString())); } SearchResponse matchResp = client().prepareSearch("test") // _uid tie sort @@ -277,11 +280,11 @@ public class MultiMatchQueryIT extends ESIntegTestCase { public void testCutoffFreq() throws ExecutionException, InterruptedException { final long numDocs = client().prepareCount("test") .setQuery(matchAllQuery()).get().getCount(); - MatchQueryBuilder.Type type = randomBoolean() ? null : MatchQueryBuilder.Type.BOOLEAN; + MatchQuery.Type type = randomBoolean() ? MatchQueryBuilder.DEFAULT_TYPE : MatchQuery.Type.BOOLEAN; Float cutoffFrequency = randomBoolean() ? Math.min(1, numDocs * 1.f / between(10, 20)) : 1.f / between(10, 20); SearchResponse searchResponse = client().prepareSearch("test") .setQuery(randomizeType(multiMatchQuery("marvel hero captain america", "full_name", "first_name", "last_name", "category") - .operator(MatchQueryBuilder.Operator.OR).cutoffFrequency(cutoffFrequency))).get(); + .operator(Operator.OR).cutoffFrequency(cutoffFrequency))).get(); Set topNIds = Sets.newHashSet("theone", "theother"); for (int i = 0; i < searchResponse.getHits().hits().length; i++) { topNIds.remove(searchResponse.getHits().getAt(i).getId()); @@ -294,39 +297,39 @@ public class MultiMatchQueryIT extends ESIntegTestCase { cutoffFrequency = randomBoolean() ? 
Math.min(1, numDocs * 1.f / between(10, 20)) : 1.f / between(10, 20); searchResponse = client().prepareSearch("test") .setQuery(randomizeType(multiMatchQuery("marvel hero captain america", "full_name", "first_name", "last_name", "category") - .operator(MatchQueryBuilder.Operator.OR).useDisMax(false).cutoffFrequency(cutoffFrequency).type(type))).get(); + .operator(Operator.OR).useDisMax(false).cutoffFrequency(cutoffFrequency).type(type))).get(); assertFirstHit(searchResponse, anyOf(hasId("theone"), hasId("theother"))); assertThat(searchResponse.getHits().hits()[0].getScore(), greaterThan(searchResponse.getHits().hits()[1].getScore())); long size = searchResponse.getHits().getTotalHits(); searchResponse = client().prepareSearch("test") .setQuery(randomizeType(multiMatchQuery("marvel hero captain america", "full_name", "first_name", "last_name", "category") - .operator(MatchQueryBuilder.Operator.OR).useDisMax(false).type(type))).get(); + .operator(Operator.OR).useDisMax(false).type(type))).get(); assertFirstHit(searchResponse, anyOf(hasId("theone"), hasId("theother"))); assertThat("common terms expected to be a way smaller result set", size, lessThan(searchResponse.getHits().getTotalHits())); cutoffFrequency = randomBoolean() ? 
Math.min(1, numDocs * 1.f / between(10, 20)) : 1.f / between(10, 20); searchResponse = client().prepareSearch("test") .setQuery(randomizeType(multiMatchQuery("marvel hero", "full_name", "first_name", "last_name", "category") - .operator(MatchQueryBuilder.Operator.OR).cutoffFrequency(cutoffFrequency).type(type))).get(); + .operator(Operator.OR).cutoffFrequency(cutoffFrequency).type(type))).get(); assertFirstHit(searchResponse, hasId("theother")); searchResponse = client().prepareSearch("test") .setQuery(randomizeType(multiMatchQuery("captain america", "full_name", "first_name", "last_name", "category") - .operator(MatchQueryBuilder.Operator.AND).cutoffFrequency(cutoffFrequency).type(type))).get(); + .operator(Operator.AND).cutoffFrequency(cutoffFrequency).type(type))).get(); assertHitCount(searchResponse, 1l); assertFirstHit(searchResponse, hasId("theone")); searchResponse = client().prepareSearch("test") .setQuery(randomizeType(multiMatchQuery("captain america", "full_name", "first_name", "last_name", "category") - .operator(MatchQueryBuilder.Operator.AND).cutoffFrequency(cutoffFrequency).type(type))).get(); + .operator(Operator.AND).cutoffFrequency(cutoffFrequency).type(type))).get(); assertHitCount(searchResponse, 1l); assertFirstHit(searchResponse, hasId("theone")); searchResponse = client().prepareSearch("test") .setQuery(randomizeType(multiMatchQuery("marvel hero", "first_name", "last_name", "category") - .operator(MatchQueryBuilder.Operator.AND).cutoffFrequency(cutoffFrequency) + .operator(Operator.AND).cutoffFrequency(cutoffFrequency) .analyzer("category") .type(MultiMatchQueryBuilder.Type.CROSS_FIELDS))).get(); assertHitCount(searchResponse, 1l); @@ -342,13 +345,13 @@ public class MultiMatchQueryIT extends ESIntegTestCase { int numIters = scaledRandomIntBetween(5, 10); for (int i = 0; i < numIters; i++) { { - MatchQueryBuilder.Type type = randomBoolean() ? null : MatchQueryBuilder.Type.BOOLEAN; + MatchQuery.Type type = randomBoolean() ? 
MatchQueryBuilder.DEFAULT_TYPE : MatchQuery.Type.BOOLEAN; MultiMatchQueryBuilder multiMatchQueryBuilder = randomBoolean() ? multiMatchQuery("marvel hero captain america", "full_name", "first_name", "last_name", "category") : multiMatchQuery("marvel hero captain america", "*_name", randomBoolean() ? "category" : "categ*"); SearchResponse left = client().prepareSearch("test").setSize(numDocs) .addSort(SortBuilders.scoreSort()).addSort(SortBuilders.fieldSort("_uid")) .setQuery(randomizeType(multiMatchQueryBuilder - .operator(MatchQueryBuilder.Operator.OR).type(type))).get(); + .operator(Operator.OR).type(type))).get(); SearchResponse right = client().prepareSearch("test").setSize(numDocs) .addSort(SortBuilders.scoreSort()).addSort(SortBuilders.fieldSort("_uid")) @@ -362,9 +365,9 @@ public class MultiMatchQueryIT extends ESIntegTestCase { } { - MatchQueryBuilder.Type type = randomBoolean() ? null : MatchQueryBuilder.Type.BOOLEAN; + MatchQuery.Type type = randomBoolean() ? MatchQueryBuilder.DEFAULT_TYPE : MatchQuery.Type.BOOLEAN; String minShouldMatch = randomBoolean() ? null : "" + between(0, 1); - MatchQueryBuilder.Operator op = randomBoolean() ? MatchQueryBuilder.Operator.AND : MatchQueryBuilder.Operator.OR; + Operator op = randomBoolean() ? Operator.AND : Operator.OR; MultiMatchQueryBuilder multiMatchQueryBuilder = randomBoolean() ? multiMatchQuery("captain america", "full_name", "first_name", "last_name", "category") : multiMatchQuery("captain america", "*_name", randomBoolean() ? "category" : "categ*"); SearchResponse left = client().prepareSearch("test").setSize(numDocs) @@ -385,11 +388,11 @@ public class MultiMatchQueryIT extends ESIntegTestCase { { String minShouldMatch = randomBoolean() ? null : "" + between(0, 1); - MatchQueryBuilder.Operator op = randomBoolean() ? MatchQueryBuilder.Operator.AND : MatchQueryBuilder.Operator.OR; + Operator op = randomBoolean() ? 
Operator.AND : Operator.OR; SearchResponse left = client().prepareSearch("test").setSize(numDocs) .addSort(SortBuilders.scoreSort()).addSort(SortBuilders.fieldSort("_uid")) .setQuery(randomizeType(multiMatchQuery("capta", "full_name", "first_name", "last_name", "category") - .type(MatchQueryBuilder.Type.PHRASE_PREFIX).useDisMax(false).minimumShouldMatch(minShouldMatch))).get(); + .type(MatchQuery.Type.PHRASE_PREFIX).useDisMax(false).minimumShouldMatch(minShouldMatch))).get(); SearchResponse right = client().prepareSearch("test").setSize(numDocs) .addSort(SortBuilders.scoreSort()).addSort(SortBuilders.fieldSort("_uid")) @@ -403,18 +406,18 @@ public class MultiMatchQueryIT extends ESIntegTestCase { } { String minShouldMatch = randomBoolean() ? null : "" + between(0, 1); - MatchQueryBuilder.Operator op = randomBoolean() ? MatchQueryBuilder.Operator.AND : MatchQueryBuilder.Operator.OR; + Operator op = randomBoolean() ? Operator.AND : Operator.OR; SearchResponse left; if (randomBoolean()) { left = client().prepareSearch("test").setSize(numDocs) .addSort(SortBuilders.scoreSort()).addSort(SortBuilders.fieldSort("_uid")) .setQuery(randomizeType(multiMatchQuery("captain america", "full_name", "first_name", "last_name", "category") - .type(MatchQueryBuilder.Type.PHRASE).useDisMax(false).minimumShouldMatch(minShouldMatch))).get(); + .type(MatchQuery.Type.PHRASE).useDisMax(false).minimumShouldMatch(minShouldMatch))).get(); } else { left = client().prepareSearch("test").setSize(numDocs) .addSort(SortBuilders.scoreSort()).addSort(SortBuilders.fieldSort("_uid")) .setQuery(randomizeType(multiMatchQuery("captain america", "full_name", "first_name", "last_name", "category") - .type(MatchQueryBuilder.Type.PHRASE).tieBreaker(1.0f).minimumShouldMatch(minShouldMatch))).get(); + .type(MatchQuery.Type.PHRASE).tieBreaker(1.0f).minimumShouldMatch(minShouldMatch))).get(); } SearchResponse right = client().prepareSearch("test").setSize(numDocs) 
.addSort(SortBuilders.scoreSort()).addSort(SortBuilders.fieldSort("_uid")) @@ -434,13 +437,13 @@ public class MultiMatchQueryIT extends ESIntegTestCase { SearchResponse searchResponse = client().prepareSearch("test") .setQuery(randomizeType(multiMatchQuery("captain america", "full_name", "first_name", "last_name") .type(MultiMatchQueryBuilder.Type.CROSS_FIELDS) - .operator(MatchQueryBuilder.Operator.OR))).get(); + .operator(Operator.OR))).get(); assertFirstHit(searchResponse, hasId("theone")); searchResponse = client().prepareSearch("test") .setQuery(randomizeType(multiMatchQuery("marvel hero captain america", "full_name", "first_name", "last_name", "category") .type(MultiMatchQueryBuilder.Type.CROSS_FIELDS) - .operator(MatchQueryBuilder.Operator.OR))).get(); + .operator(Operator.OR))).get(); assertFirstHit(searchResponse, hasId("theone")); assertSecondHit(searchResponse, hasId("theother")); assertThat(searchResponse.getHits().hits()[0].getScore(), greaterThan(searchResponse.getHits().hits()[1].getScore())); @@ -448,13 +451,13 @@ public class MultiMatchQueryIT extends ESIntegTestCase { searchResponse = client().prepareSearch("test") .setQuery(randomizeType(multiMatchQuery("marvel hero", "full_name", "first_name", "last_name", "category") .type(MultiMatchQueryBuilder.Type.CROSS_FIELDS) - .operator(MatchQueryBuilder.Operator.OR))).get(); + .operator(Operator.OR))).get(); assertFirstHit(searchResponse, hasId("theother")); searchResponse = client().prepareSearch("test") .setQuery(randomizeType(multiMatchQuery("captain america", "full_name", "first_name", "last_name", "category") .type(MultiMatchQueryBuilder.Type.CROSS_FIELDS) - .operator(MatchQueryBuilder.Operator.AND))).get(); + .operator(Operator.AND))).get(); assertHitCount(searchResponse, 1l); assertFirstHit(searchResponse, hasId("theone")); @@ -462,7 +465,7 @@ public class MultiMatchQueryIT extends ESIntegTestCase { .setQuery(randomizeType(multiMatchQuery("captain america 15", "full_name", "first_name", 
"last_name", "category", "skill") .type(MultiMatchQueryBuilder.Type.CROSS_FIELDS) .analyzer("category") - .operator(MatchQueryBuilder.Operator.AND))).get(); + .operator(Operator.AND))).get(); assertHitCount(searchResponse, 1l); assertFirstHit(searchResponse, hasId("theone")); @@ -483,7 +486,7 @@ public class MultiMatchQueryIT extends ESIntegTestCase { .type(MultiMatchQueryBuilder.Type.CROSS_FIELDS) .cutoffFrequency(0.1f) .analyzer("category") - .operator(MatchQueryBuilder.Operator.OR))).get(); + .operator(Operator.OR))).get(); assertFirstHit(searchResponse, anyOf(hasId("theother"), hasId("theone"))); long numResults = searchResponse.getHits().totalHits(); @@ -491,7 +494,7 @@ public class MultiMatchQueryIT extends ESIntegTestCase { .setQuery(randomizeType(multiMatchQuery("captain america marvel hero", "first_name", "last_name", "category") .type(MultiMatchQueryBuilder.Type.CROSS_FIELDS) .analyzer("category") - .operator(MatchQueryBuilder.Operator.OR))).get(); + .operator(Operator.OR))).get(); assertThat(numResults, lessThan(searchResponse.getHits().getTotalHits())); assertFirstHit(searchResponse, hasId("theone")); @@ -501,28 +504,28 @@ public class MultiMatchQueryIT extends ESIntegTestCase { .setQuery(randomizeType(multiMatchQuery("captain america marvel hero", "first_name", "last_name", "category") .type(MultiMatchQueryBuilder.Type.CROSS_FIELDS) .analyzer("category") - .operator(MatchQueryBuilder.Operator.AND))).get(); + .operator(Operator.AND))).get(); assertHitCount(searchResponse, 1l); assertFirstHit(searchResponse, hasId("theone")); // counter example searchResponse = client().prepareSearch("test") .setQuery(randomizeType(multiMatchQuery("captain america marvel hero", "first_name", "last_name", "category") - .type(randomBoolean() ? MultiMatchQueryBuilder.Type.CROSS_FIELDS : null) - .operator(MatchQueryBuilder.Operator.AND))).get(); + .type(randomBoolean() ? 
MultiMatchQueryBuilder.Type.CROSS_FIELDS : MultiMatchQueryBuilder.DEFAULT_TYPE) + .operator(Operator.AND))).get(); assertHitCount(searchResponse, 0l); // counter example searchResponse = client().prepareSearch("test") .setQuery(randomizeType(multiMatchQuery("captain america marvel hero", "first_name", "last_name", "category") - .type(randomBoolean() ? MultiMatchQueryBuilder.Type.CROSS_FIELDS : null) - .operator(MatchQueryBuilder.Operator.AND))).get(); + .type(randomBoolean() ? MultiMatchQueryBuilder.Type.CROSS_FIELDS : MultiMatchQueryBuilder.DEFAULT_TYPE) + .operator(Operator.AND))).get(); assertHitCount(searchResponse, 0l); // test if boosts work searchResponse = client().prepareSearch("test") - .setQuery(randomizeType(multiMatchQuery("the ultimate", "full_name", "first_name", "last_name^2", "category") + .setQuery(randomizeType(multiMatchQuery("the ultimate", "full_name", "first_name", "last_name", "category").field("last_name", 2) .type(MultiMatchQueryBuilder.Type.CROSS_FIELDS) - .operator(MatchQueryBuilder.Operator.AND))).get(); + .operator(Operator.AND))).get(); assertFirstHit(searchResponse, hasId("ultimate1")); // has ultimate in the last_name and that is boosted assertSecondHit(searchResponse, hasId("ultimate2")); assertThat(searchResponse.getHits().hits()[0].getScore(), greaterThan(searchResponse.getHits().hits()[1].getScore())); @@ -532,7 +535,7 @@ public class MultiMatchQueryIT extends ESIntegTestCase { searchResponse = client().prepareSearch("test") .setQuery(randomizeType(multiMatchQuery("the ultimate", "full_name", "first_name", "last_name", "category") .type(MultiMatchQueryBuilder.Type.CROSS_FIELDS) - .operator(MatchQueryBuilder.Operator.AND))).get(); + .operator(Operator.AND))).get(); assertFirstHit(searchResponse, hasId("ultimate2")); assertSecondHit(searchResponse, hasId("ultimate1")); assertThat(searchResponse.getHits().hits()[0].getScore(), greaterThan(searchResponse.getHits().hits()[1].getScore())); @@ -558,7 +561,6 @@ public class 
MultiMatchQueryIT extends ESIntegTestCase { } } - public static List fill(List list, String value, int times) { for (int i = 0; i < times; i++) { list.add(value); @@ -599,24 +601,24 @@ public class MultiMatchQueryIT extends ESIntegTestCase { switch (type) { case BEST_FIELDS: if (randomBoolean()) { - oType = MatchQueryBuilder.Type.BOOLEAN; + oType = MatchQuery.Type.BOOLEAN; } break; case MOST_FIELDS: if (randomBoolean()) { - oType = MatchQueryBuilder.Type.BOOLEAN; + oType = MatchQuery.Type.BOOLEAN; } break; case CROSS_FIELDS: break; case PHRASE: if (randomBoolean()) { - oType = MatchQueryBuilder.Type.PHRASE; + oType = MatchQuery.Type.PHRASE; } break; case PHRASE_PREFIX: if (randomBoolean()) { - oType = MatchQueryBuilder.Type.PHRASE_PREFIX; + oType = MatchQuery.Type.PHRASE_PREFIX; } break; } diff --git a/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java b/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java index 8ee123fa7a4..83a8008213f 100644 --- a/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java +++ b/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java @@ -20,7 +20,6 @@ package org.elasticsearch.search.query; import org.apache.lucene.util.English; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.index.IndexRequestBuilder; @@ -32,15 +31,10 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.query.BoolQueryBuilder; -import org.elasticsearch.index.query.CommonTermsQueryBuilder.Operator; -import org.elasticsearch.index.query.MatchQueryBuilder; -import org.elasticsearch.index.query.MatchQueryBuilder.Type; -import 
org.elasticsearch.index.query.MultiMatchQueryBuilder; -import org.elasticsearch.index.query.QueryBuilders; -import org.elasticsearch.index.query.QueryStringQueryBuilder; -import org.elasticsearch.index.query.TermQueryBuilder; -import org.elasticsearch.index.query.WrapperQueryBuilder; +import org.elasticsearch.index.query.*; +import org.elasticsearch.index.search.MatchQuery.Type; +import org.elasticsearch.index.search.MatchQuery; +import org.elasticsearch.indices.cache.query.terms.TermsLookup; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.script.Script; import org.elasticsearch.search.SearchHit; @@ -62,24 +56,8 @@ import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.index.query.QueryBuilders.*; import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.scriptFunction; import static org.elasticsearch.test.VersionUtils.randomVersion; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFailures; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFirstHit; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHit; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSecondHit; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertThirdHit; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasId; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasScore; -import static 
org.hamcrest.Matchers.allOf; -import static org.hamcrest.Matchers.closeTo; -import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.greaterThan; -import static org.hamcrest.Matchers.is; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*; +import static org.hamcrest.Matchers.*; public class SearchQueryIT extends ESIntegTestCase { @@ -185,7 +163,7 @@ public class SearchQueryIT extends ESIntegTestCase { client().prepareIndex("test", "type1", "1").setSource("field1", "quick brown fox", "field2", "quick brown fox"), client().prepareIndex("test", "type1", "2").setSource("field1", "quick lazy huge brown fox", "field2", "quick lazy huge brown fox")); - SearchResponse searchResponse = client().prepareSearch().setQuery(matchQuery("field2", "quick brown").type(MatchQueryBuilder.Type.PHRASE).slop(0)).get(); + SearchResponse searchResponse = client().prepareSearch().setQuery(matchQuery("field2", "quick brown").type(Type.PHRASE).slop(0)).get(); assertHitCount(searchResponse, 1l); assertFailures(client().prepareSearch().setQuery(matchQuery("field1", "quick brown").type(Type.PHRASE).slop(0)), @@ -349,18 +327,18 @@ public class SearchQueryIT extends ESIntegTestCase { assertThirdHit(searchResponse, hasId("2")); // try the same with match query - searchResponse = client().prepareSearch().setQuery(matchQuery("field1", "the quick brown").cutoffFrequency(3).operator(MatchQueryBuilder.Operator.AND)).get(); + searchResponse = client().prepareSearch().setQuery(matchQuery("field1", "the quick brown").cutoffFrequency(3).operator(Operator.AND)).get(); assertHitCount(searchResponse, 2l); assertFirstHit(searchResponse, hasId("1")); assertSecondHit(searchResponse, hasId("2")); - searchResponse = client().prepareSearch().setQuery(matchQuery("field1", "the quick brown").cutoffFrequency(3).operator(MatchQueryBuilder.Operator.OR)).get(); + searchResponse = 
client().prepareSearch().setQuery(matchQuery("field1", "the quick brown").cutoffFrequency(3).operator(Operator.OR)).get(); assertHitCount(searchResponse, 3l); assertFirstHit(searchResponse, hasId("1")); assertSecondHit(searchResponse, hasId("2")); assertThirdHit(searchResponse, hasId("3")); - searchResponse = client().prepareSearch().setQuery(matchQuery("field1", "the quick brown").cutoffFrequency(3).operator(MatchQueryBuilder.Operator.AND).analyzer("stop")).get(); + searchResponse = client().prepareSearch().setQuery(matchQuery("field1", "the quick brown").cutoffFrequency(3).operator(Operator.AND).analyzer("stop")).get(); assertHitCount(searchResponse, 3l); // stop drops "the" since its a stopword assertFirstHit(searchResponse, hasId("1")); @@ -368,7 +346,7 @@ public class SearchQueryIT extends ESIntegTestCase { assertThirdHit(searchResponse, hasId("2")); // try the same with multi match query - searchResponse = client().prepareSearch().setQuery(multiMatchQuery("the quick brown", "field1", "field2").cutoffFrequency(3).operator(MatchQueryBuilder.Operator.AND)).get(); + searchResponse = client().prepareSearch().setQuery(multiMatchQuery("the quick brown", "field1", "field2").cutoffFrequency(3).operator(Operator.AND)).get(); assertHitCount(searchResponse, 3l); assertFirstHit(searchResponse, hasId("3")); // better score due to different query stats assertSecondHit(searchResponse, hasId("1")); @@ -441,18 +419,18 @@ public class SearchQueryIT extends ESIntegTestCase { assertThirdHit(searchResponse, hasId("2")); // try the same with match query - searchResponse = client().prepareSearch().setQuery(matchQuery("field1", "the fast brown").cutoffFrequency(3).operator(MatchQueryBuilder.Operator.AND)).get(); + searchResponse = client().prepareSearch().setQuery(matchQuery("field1", "the fast brown").cutoffFrequency(3).operator(Operator.AND)).get(); assertHitCount(searchResponse, 2l); assertFirstHit(searchResponse, hasId("1")); assertSecondHit(searchResponse, hasId("2")); - 
searchResponse = client().prepareSearch().setQuery(matchQuery("field1", "the fast brown").cutoffFrequency(3).operator(MatchQueryBuilder.Operator.OR)).get(); + searchResponse = client().prepareSearch().setQuery(matchQuery("field1", "the fast brown").cutoffFrequency(3).operator(Operator.OR)).get(); assertHitCount(searchResponse, 3l); assertFirstHit(searchResponse, hasId("1")); assertSecondHit(searchResponse, hasId("2")); assertThirdHit(searchResponse, hasId("3")); - searchResponse = client().prepareSearch().setQuery(matchQuery("field1", "the fast brown").cutoffFrequency(3).operator(MatchQueryBuilder.Operator.AND).analyzer("stop")).get(); + searchResponse = client().prepareSearch().setQuery(matchQuery("field1", "the fast brown").cutoffFrequency(3).operator(Operator.AND).analyzer("stop")).get(); assertHitCount(searchResponse, 3l); // stop drops "the" since its a stopword assertFirstHit(searchResponse, hasId("1")); @@ -465,7 +443,7 @@ public class SearchQueryIT extends ESIntegTestCase { assertSecondHit(searchResponse, hasId("2")); // try the same with multi match query - searchResponse = client().prepareSearch().setQuery(multiMatchQuery("the fast brown", "field1", "field2").cutoffFrequency(3).operator(MatchQueryBuilder.Operator.AND)).get(); + searchResponse = client().prepareSearch().setQuery(multiMatchQuery("the fast brown", "field1", "field2").cutoffFrequency(3).operator(Operator.AND)).get(); assertHitCount(searchResponse, 3l); assertFirstHit(searchResponse, hasId("3")); // better score due to different query stats assertSecondHit(searchResponse, hasId("1")); @@ -489,10 +467,10 @@ public class SearchQueryIT extends ESIntegTestCase { client().prepareIndex("test", "type1", "2").setSource("field1", "quick lazy huge brown fox", "field2", "quick lazy huge brown fox")); - SearchResponse searchResponse = client().prepareSearch().setQuery(matchQuery("field2", "quick brown").type(MatchQueryBuilder.Type.PHRASE).slop(0)).get(); + SearchResponse searchResponse = 
client().prepareSearch().setQuery(matchQuery("field2", "quick brown").type(Type.PHRASE).slop(0)).get(); assertHitCount(searchResponse, 1l); try { - client().prepareSearch().setQuery(matchQuery("field1", "quick brown").type(MatchQueryBuilder.Type.PHRASE).slop(0)).get(); + client().prepareSearch().setQuery(matchQuery("field1", "quick brown").type(Type.PHRASE).slop(0)).get(); fail("SearchPhaseExecutionException should have been thrown"); } catch (SearchPhaseExecutionException e) { assertTrue(e.toString().contains("IllegalStateException[field \"field1\" was indexed without position data; cannot run PhraseQuery")); @@ -923,7 +901,7 @@ public class SearchQueryIT extends ESIntegTestCase { client().admin().indices().prepareRefresh("test").get(); builder = multiMatchQuery("value1", "field1", "field2") - .operator(MatchQueryBuilder.Operator.AND); // Operator only applies on terms inside a field! Fields are always OR-ed together. + .operator(Operator.AND); // Operator only applies on terms inside a field! Fields are always OR-ed together. searchResponse = client().prepareSearch() .setQuery(builder) .get(); @@ -931,15 +909,15 @@ public class SearchQueryIT extends ESIntegTestCase { assertFirstHit(searchResponse, hasId("1")); refresh(); - builder = multiMatchQuery("value1", "field1", "field3^1.5") - .operator(MatchQueryBuilder.Operator.AND); // Operator only applies on terms inside a field! Fields are always OR-ed together. + builder = multiMatchQuery("value1", "field1").field("field3", 1.5f) + .operator(Operator.AND); // Operator only applies on terms inside a field! Fields are always OR-ed together. searchResponse = client().prepareSearch().setQuery(builder).get(); assertHitCount(searchResponse, 2l); assertSearchHits(searchResponse, "3", "1"); client().admin().indices().prepareRefresh("test").get(); builder = multiMatchQuery("value1").field("field1").field("field3", 1.5f) - .operator(MatchQueryBuilder.Operator.AND); // Operator only applies on terms inside a field! 
Fields are always OR-ed together. + .operator(Operator.AND); // Operator only applies on terms inside a field! Fields are always OR-ed together. searchResponse = client().prepareSearch().setQuery(builder).get(); assertHitCount(searchResponse, 2l); assertSearchHits(searchResponse, "3", "1"); @@ -969,18 +947,18 @@ public class SearchQueryIT extends ESIntegTestCase { refresh(); BoolQueryBuilder boolQuery = boolQuery() - .must(matchQuery("field1", "a").zeroTermsQuery(MatchQueryBuilder.ZeroTermsQuery.NONE)) - .must(matchQuery("field1", "value1").zeroTermsQuery(MatchQueryBuilder.ZeroTermsQuery.NONE)); + .must(matchQuery("field1", "a").zeroTermsQuery(MatchQuery.ZeroTermsQuery.NONE)) + .must(matchQuery("field1", "value1").zeroTermsQuery(MatchQuery.ZeroTermsQuery.NONE)); SearchResponse searchResponse = client().prepareSearch().setQuery(boolQuery).get(); assertHitCount(searchResponse, 0l); boolQuery = boolQuery() - .must(matchQuery("field1", "a").zeroTermsQuery(MatchQueryBuilder.ZeroTermsQuery.ALL)) - .must(matchQuery("field1", "value1").zeroTermsQuery(MatchQueryBuilder.ZeroTermsQuery.ALL)); + .must(matchQuery("field1", "a").zeroTermsQuery(MatchQuery.ZeroTermsQuery.ALL)) + .must(matchQuery("field1", "value1").zeroTermsQuery(MatchQuery.ZeroTermsQuery.ALL)); searchResponse = client().prepareSearch().setQuery(boolQuery).get(); assertHitCount(searchResponse, 1l); - boolQuery = boolQuery().must(matchQuery("field1", "a").zeroTermsQuery(MatchQueryBuilder.ZeroTermsQuery.ALL)); + boolQuery = boolQuery().must(matchQuery("field1", "a").zeroTermsQuery(MatchQuery.ZeroTermsQuery.ALL)); searchResponse = client().prepareSearch().setQuery(boolQuery).get(); assertHitCount(searchResponse, 2l); } @@ -994,18 +972,18 @@ public class SearchQueryIT extends ESIntegTestCase { BoolQueryBuilder boolQuery = boolQuery() - .must(multiMatchQuery("a", "field1", "field2").zeroTermsQuery(MatchQueryBuilder.ZeroTermsQuery.NONE)) - .must(multiMatchQuery("value1", "field1", 
"field2").zeroTermsQuery(MatchQueryBuilder.ZeroTermsQuery.NONE)); // Fields are ORed together + .must(multiMatchQuery("a", "field1", "field2").zeroTermsQuery(MatchQuery.ZeroTermsQuery.NONE)) + .must(multiMatchQuery("value1", "field1", "field2").zeroTermsQuery(MatchQuery.ZeroTermsQuery.NONE)); // Fields are ORed together SearchResponse searchResponse = client().prepareSearch().setQuery(boolQuery).get(); assertHitCount(searchResponse, 0l); boolQuery = boolQuery() - .must(multiMatchQuery("a", "field1", "field2").zeroTermsQuery(MatchQueryBuilder.ZeroTermsQuery.ALL)) - .must(multiMatchQuery("value4", "field1", "field2").zeroTermsQuery(MatchQueryBuilder.ZeroTermsQuery.ALL)); + .must(multiMatchQuery("a", "field1", "field2").zeroTermsQuery(MatchQuery.ZeroTermsQuery.ALL)) + .must(multiMatchQuery("value4", "field1", "field2").zeroTermsQuery(MatchQuery.ZeroTermsQuery.ALL)); searchResponse = client().prepareSearch().setQuery(boolQuery).get(); assertHitCount(searchResponse, 1l); - boolQuery = boolQuery().must(multiMatchQuery("a", "field1").zeroTermsQuery(MatchQueryBuilder.ZeroTermsQuery.ALL)); + boolQuery = boolQuery().must(multiMatchQuery("a", "field1").zeroTermsQuery(MatchQuery.ZeroTermsQuery.ALL)); searchResponse = client().prepareSearch().setQuery(boolQuery).get(); assertHitCount(searchResponse, 2l); } @@ -1254,63 +1232,54 @@ public class SearchQueryIT extends ESIntegTestCase { client().prepareIndex("test", "type", "4").setSource("term", "4") ); SearchResponse searchResponse = client().prepareSearch("test") - .setQuery(termsLookupQuery("term").lookupIndex("lookup").lookupType("type").lookupId("1").lookupPath("terms") - ).get(); + .setQuery(termsLookupQuery("term" , new TermsLookup("lookup", "type", "1", "terms"))).get(); assertHitCount(searchResponse, 2l); assertSearchHits(searchResponse, "1", "3"); // same as above, just on the _id... 
searchResponse = client().prepareSearch("test") - .setQuery(termsLookupQuery("_id").lookupIndex("lookup").lookupType("type").lookupId("1").lookupPath("terms") + .setQuery(termsLookupQuery("_id", new TermsLookup("lookup", "type", "1", "terms")) ).get(); assertHitCount(searchResponse, 2l); assertSearchHits(searchResponse, "1", "3"); // another search with same parameters... searchResponse = client().prepareSearch("test") - .setQuery(termsLookupQuery("term").lookupIndex("lookup").lookupType("type").lookupId("1").lookupPath("terms") - ).get(); + .setQuery(termsLookupQuery("term", new TermsLookup("lookup", "type", "1", "terms"))).get(); assertHitCount(searchResponse, 2l); assertSearchHits(searchResponse, "1", "3"); searchResponse = client().prepareSearch("test") - .setQuery(termsLookupQuery("term").lookupIndex("lookup").lookupType("type").lookupId("2").lookupPath("terms") - ).get(); + .setQuery(termsLookupQuery("term", new TermsLookup("lookup", "type", "2", "terms"))).get(); assertHitCount(searchResponse, 1l); assertFirstHit(searchResponse, hasId("2")); searchResponse = client().prepareSearch("test") - .setQuery(termsLookupQuery("term").lookupIndex("lookup").lookupType("type").lookupId("3").lookupPath("terms") - ).get(); + .setQuery(termsLookupQuery("term", new TermsLookup("lookup", "type", "3", "terms"))).get(); assertHitCount(searchResponse, 2l); assertSearchHits(searchResponse, "2", "4"); searchResponse = client().prepareSearch("test") - .setQuery(termsLookupQuery("term").lookupIndex("lookup").lookupType("type").lookupId("4").lookupPath("terms") - ).get(); + .setQuery(termsLookupQuery("term", new TermsLookup("lookup", "type", "4", "terms"))).get(); assertHitCount(searchResponse, 0l); searchResponse = client().prepareSearch("test") - .setQuery(termsLookupQuery("term").lookupIndex("lookup2").lookupType("type").lookupId("1").lookupPath("arr.term") - ).get(); + .setQuery(termsLookupQuery("term", new TermsLookup("lookup2", "type", "1", "arr.term"))).get(); 
assertHitCount(searchResponse, 2l); assertSearchHits(searchResponse, "1", "3"); searchResponse = client().prepareSearch("test") - .setQuery(termsLookupQuery("term").lookupIndex("lookup2").lookupType("type").lookupId("2").lookupPath("arr.term") - ).get(); + .setQuery(termsLookupQuery("term", new TermsLookup("lookup2", "type", "2", "arr.term"))).get(); assertHitCount(searchResponse, 1l); assertFirstHit(searchResponse, hasId("2")); searchResponse = client().prepareSearch("test") - .setQuery(termsLookupQuery("term").lookupIndex("lookup2").lookupType("type").lookupId("3").lookupPath("arr.term") - ).get(); + .setQuery(termsLookupQuery("term", new TermsLookup("lookup2", "type", "3", "arr.term"))).get(); assertHitCount(searchResponse, 2l); assertSearchHits(searchResponse, "2", "4"); searchResponse = client().prepareSearch("test") - .setQuery(termsLookupQuery("not_exists").lookupIndex("lookup2").lookupType("type").lookupId("3").lookupPath("arr.term") - ).get(); + .setQuery(termsLookupQuery("not_exists", new TermsLookup("lookup2", "type", "3", "arr.term"))).get(); assertHitCount(searchResponse, 0l); } @@ -1579,14 +1548,12 @@ public class SearchQueryIT extends ESIntegTestCase { client().prepareIndex("test", "test", "4").setSource("description", "foo")); SearchResponse searchResponse = client().prepareSearch("test") - .setQuery(spanOrQuery().clause(spanTermQuery("description", "bar"))).get(); + .setQuery(spanOrQuery(spanTermQuery("description", "bar"))).get(); assertHitCount(searchResponse, 1l); searchResponse = client().prepareSearch("test").setQuery( - spanNearQuery() - .clause(spanTermQuery("description", "foo")) - .clause(spanTermQuery("description", "other")) - .slop(3)).get(); + spanNearQuery(spanTermQuery("description", "foo"), 3) + .clause(spanTermQuery("description", "other"))).get(); assertHitCount(searchResponse, 3l); } @@ -1601,24 +1568,24 @@ public class SearchQueryIT extends ESIntegTestCase { refresh(); SearchResponse response = client().prepareSearch("test") - 
.setQuery(spanOrQuery().clause(spanMultiTermQueryBuilder(fuzzyQuery("description", "fop")))).get(); + .setQuery(spanOrQuery(spanMultiTermQueryBuilder(fuzzyQuery("description", "fop")))).get(); assertHitCount(response, 4); response = client().prepareSearch("test") - .setQuery(spanOrQuery().clause(spanMultiTermQueryBuilder(prefixQuery("description", "fo")))).get(); + .setQuery(spanOrQuery(spanMultiTermQueryBuilder(prefixQuery("description", "fo")))).get(); assertHitCount(response, 4); response = client().prepareSearch("test") - .setQuery(spanOrQuery().clause(spanMultiTermQueryBuilder(wildcardQuery("description", "oth*")))).get(); + .setQuery(spanOrQuery(spanMultiTermQueryBuilder(wildcardQuery("description", "oth*")))).get(); assertHitCount(response, 3); response = client().prepareSearch("test") - .setQuery(spanOrQuery().clause(spanMultiTermQueryBuilder(QueryBuilders.rangeQuery("description").from("ffa").to("foo")))) + .setQuery(spanOrQuery(spanMultiTermQueryBuilder(QueryBuilders.rangeQuery("description").from("ffa").to("foo")))) .execute().actionGet(); assertHitCount(response, 3); response = client().prepareSearch("test") - .setQuery(spanOrQuery().clause(spanMultiTermQueryBuilder(regexpQuery("description", "fo{2}")))).get(); + .setQuery(spanOrQuery(spanMultiTermQueryBuilder(regexpQuery("description", "fo{2}")))).get(); assertHitCount(response, 3); } @@ -1631,33 +1598,19 @@ public class SearchQueryIT extends ESIntegTestCase { refresh(); SearchResponse searchResponse = client().prepareSearch("test") - .setQuery(spanNotQuery().include(spanNearQuery() - .clause(QueryBuilders.spanTermQuery("description", "quick")) - .clause(QueryBuilders.spanTermQuery("description", "fox")).slop(1)).exclude(spanTermQuery("description", "brown"))).get(); + .setQuery(spanNotQuery(spanNearQuery(QueryBuilders.spanTermQuery("description", "quick"), 1) + .clause(QueryBuilders.spanTermQuery("description", "fox")), spanTermQuery("description", "brown"))).get(); assertHitCount(searchResponse, 1l); 
searchResponse = client().prepareSearch("test") - .setQuery(spanNotQuery().include(spanNearQuery() - .clause(QueryBuilders.spanTermQuery("description", "quick")) - .clause(QueryBuilders.spanTermQuery("description", "fox")).slop(1)).exclude(spanTermQuery("description", "sleeping")).dist(5)).get(); + .setQuery(spanNotQuery(spanNearQuery(QueryBuilders.spanTermQuery("description", "quick"), 1) + .clause(QueryBuilders.spanTermQuery("description", "fox")), spanTermQuery("description", "sleeping")).dist(5)).get(); assertHitCount(searchResponse, 1l); searchResponse = client().prepareSearch("test") - .setQuery(spanNotQuery().include(spanNearQuery() - .clause(QueryBuilders.spanTermQuery("description", "quick")) - .clause(QueryBuilders.spanTermQuery("description", "fox")).slop(1)).exclude(spanTermQuery("description", "jumped")).pre(1).post(1)).get(); + .setQuery(spanNotQuery(spanNearQuery(QueryBuilders.spanTermQuery("description", "quick"), 1) + .clause(QueryBuilders.spanTermQuery("description", "fox")), spanTermQuery("description", "jumped")).pre(1).post(1)).get(); assertHitCount(searchResponse, 1l); - - try { - client().prepareSearch("test") - .setQuery(spanNotQuery().include(spanNearQuery() - .clause(QueryBuilders.spanTermQuery("description", "quick")) - .clause(QueryBuilders.spanTermQuery("description", "fox")).slop(1)).exclude(spanTermQuery("description", "jumped")).dist(2).pre(2) - ).get(); - fail("ElasticsearchIllegalArgumentException should have been caught"); - } catch (ElasticsearchException e) { - assertThat("ElasticsearchIllegalArgumentException should have been caught", e.getDetailedMessage(), containsString("spanNot can either use [dist] or [pre] & [post] (or none)")); - } } @Test @@ -1753,18 +1706,18 @@ public class SearchQueryIT extends ESIntegTestCase { client().prepareIndex("test", "test", "1").setSource("text", "quick brown fox").get(); refresh(); - SearchResponse searchResponse = client().prepareSearch("test").setQuery(matchQuery("text", 
"quick").operator(MatchQueryBuilder.Operator.AND)).get(); + SearchResponse searchResponse = client().prepareSearch("test").setQuery(matchQuery("text", "quick").operator(Operator.AND)).get(); assertHitCount(searchResponse, 1); - searchResponse = client().prepareSearch("test").setQuery(matchQuery("text", "quick brown").operator(MatchQueryBuilder.Operator.AND)).get(); + searchResponse = client().prepareSearch("test").setQuery(matchQuery("text", "quick brown").operator(Operator.AND)).get(); assertHitCount(searchResponse, 1); - searchResponse = client().prepareSearch("test").setQuery(matchQuery("text", "fast").operator(MatchQueryBuilder.Operator.AND)).get(); + searchResponse = client().prepareSearch("test").setQuery(matchQuery("text", "fast").operator(Operator.AND)).get(); assertHitCount(searchResponse, 1); client().prepareIndex("test", "test", "2").setSource("text", "fast brown fox").get(); refresh(); - searchResponse = client().prepareSearch("test").setQuery(matchQuery("text", "quick").operator(MatchQueryBuilder.Operator.AND)).get(); + searchResponse = client().prepareSearch("test").setQuery(matchQuery("text", "quick").operator(Operator.AND)).get(); assertHitCount(searchResponse, 2); - searchResponse = client().prepareSearch("test").setQuery(matchQuery("text", "quick brown").operator(MatchQueryBuilder.Operator.AND)).get(); + searchResponse = client().prepareSearch("test").setQuery(matchQuery("text", "quick brown").operator(Operator.AND)).get(); assertHitCount(searchResponse, 2); } @@ -1784,12 +1737,12 @@ public class SearchQueryIT extends ESIntegTestCase { client().prepareIndex("test", "test", "1").setSource("text", "the fox runs across the street").get(); refresh(); - SearchResponse searchResponse = client().prepareSearch("test").setQuery(matchQuery("text", "fox runs").operator(MatchQueryBuilder.Operator.AND)).get(); + SearchResponse searchResponse = client().prepareSearch("test").setQuery(matchQuery("text", "fox runs").operator(Operator.AND)).get(); 
assertHitCount(searchResponse, 1); client().prepareIndex("test", "test", "2").setSource("text", "run fox run").get(); refresh(); - searchResponse = client().prepareSearch("test").setQuery(matchQuery("text", "fox runs").operator(MatchQueryBuilder.Operator.AND)).get(); + searchResponse = client().prepareSearch("test").setQuery(matchQuery("text", "fox runs").operator(Operator.AND)).get(); assertHitCount(searchResponse, 2); } @@ -1810,19 +1763,19 @@ public class SearchQueryIT extends ESIntegTestCase { client().prepareIndex("test", "test", "1").setSource("text", "quick brown fox").get(); refresh(); - SearchResponse searchResponse = client().prepareSearch("test").setQuery(queryStringQuery("quick").defaultField("text").defaultOperator(QueryStringQueryBuilder.Operator.AND)).get(); + SearchResponse searchResponse = client().prepareSearch("test").setQuery(queryStringQuery("quick").defaultField("text").defaultOperator(Operator.AND)).get(); assertHitCount(searchResponse, 1); - searchResponse = client().prepareSearch("test").setQuery(queryStringQuery("quick brown").defaultField("text").defaultOperator(QueryStringQueryBuilder.Operator.AND)).get(); + searchResponse = client().prepareSearch("test").setQuery(queryStringQuery("quick brown").defaultField("text").defaultOperator(Operator.AND)).get(); assertHitCount(searchResponse, 1); - searchResponse = client().prepareSearch().setQuery(queryStringQuery("fast").defaultField("text").defaultOperator(QueryStringQueryBuilder.Operator.AND)).get(); + searchResponse = client().prepareSearch().setQuery(queryStringQuery("fast").defaultField("text").defaultOperator(Operator.AND)).get(); assertHitCount(searchResponse, 1); client().prepareIndex("test", "test", "2").setSource("text", "fast brown fox").get(); refresh(); - searchResponse = client().prepareSearch("test").setQuery(queryStringQuery("quick").defaultField("text").defaultOperator(QueryStringQueryBuilder.Operator.AND)).get(); + searchResponse = 
client().prepareSearch("test").setQuery(queryStringQuery("quick").defaultField("text").defaultOperator(Operator.AND)).get(); assertHitCount(searchResponse, 2); - searchResponse = client().prepareSearch("test").setQuery(queryStringQuery("quick brown").defaultField("text").defaultOperator(QueryStringQueryBuilder.Operator.AND)).get(); + searchResponse = client().prepareSearch("test").setQuery(queryStringQuery("quick brown").defaultField("text").defaultOperator(Operator.AND)).get(); assertHitCount(searchResponse, 2); } @@ -1848,7 +1801,7 @@ public class SearchQueryIT extends ESIntegTestCase { SearchResponse response = client() .prepareSearch("test") .setQuery( - queryStringQuery("foo.baz").useDisMax(false).defaultOperator(QueryStringQueryBuilder.Operator.AND) + queryStringQuery("foo.baz").useDisMax(false).defaultOperator(Operator.AND) .field("field1").field("field2")).get(); assertHitCount(response, 1l); } @@ -1861,15 +1814,15 @@ public class SearchQueryIT extends ESIntegTestCase { refresh(); SearchResponse searchResponse = client().prepareSearch("test") - .setQuery(multiMatchQuery("value2", "field1^2", "field2").lenient(true).useDisMax(false)).get(); + .setQuery(multiMatchQuery("value2", "field2").field("field1", 2).lenient(true).useDisMax(false)).get(); assertHitCount(searchResponse, 1l); searchResponse = client().prepareSearch("test") - .setQuery(multiMatchQuery("value2", "field1^2", "field2").lenient(true).useDisMax(true)).get(); + .setQuery(multiMatchQuery("value2", "field2").field("field1", 2).lenient(true).useDisMax(true)).get(); assertHitCount(searchResponse, 1l); searchResponse = client().prepareSearch("test") - .setQuery(multiMatchQuery("value2", "field2^2").lenient(true)).get(); + .setQuery(multiMatchQuery("value2").field("field2", 2).lenient(true)).get(); assertHitCount(searchResponse, 1l); } @@ -1914,7 +1867,7 @@ public class SearchQueryIT extends ESIntegTestCase { assertSearchHits(searchResponse, "1", "2", "3"); searchResponse = 
client().prepareSearch("index1", "index2", "index3") .setQuery(indicesQuery(matchQuery("text", "value1"), "index1") - .noMatchQuery("all")).get(); + .noMatchQuery(QueryBuilders.matchAllQuery())).get(); assertHitCount(searchResponse, 3l); assertSearchHits(searchResponse, "1", "2", "3"); @@ -1945,7 +1898,7 @@ public class SearchQueryIT extends ESIntegTestCase { } catch (SearchPhaseExecutionException e) { assertThat(e.shardFailures().length, greaterThan(0)); for (ShardSearchFailure shardSearchFailure : e.shardFailures()) { - assertThat(shardSearchFailure.reason(), containsString("No mapping for for type [child]")); + assertThat(shardSearchFailure.reason(), containsString("no mapping found for type [child]")); } } @@ -2152,7 +2105,7 @@ functionScoreQuery(scriptFunction(new Script("_doc['score'].value")))).setMinSco client().prepareSearch("test") .setQuery(QueryBuilders.rangeQuery("date").from(1388534400000L).to(1388537940999L).timeZone("+01:00")) .get(); - fail("A Range Filter using ms since epoch with a TimeZone should raise a QueryParsingException"); + fail("A Range Filter using ms since epoch with a TimeZone should raise a ParsingException"); } catch (SearchPhaseExecutionException e) { // We expect it } @@ -2174,7 +2127,7 @@ functionScoreQuery(scriptFunction(new Script("_doc['score'].value")))).setMinSco client().prepareSearch("test") .setQuery(QueryBuilders.rangeQuery("num").from("0").to("4").timeZone("-01:00")) .get(); - fail("A Range Filter on a numeric field with a TimeZone should raise a QueryParsingException"); + fail("A Range Filter on a numeric field with a TimeZone should raise a ParsingException"); } catch (SearchPhaseExecutionException e) { // We expect it } diff --git a/core/src/test/java/org/elasticsearch/search/query/SimpleQueryStringIT.java b/core/src/test/java/org/elasticsearch/search/query/SimpleQueryStringIT.java index 3a857bf7845..bf3e458cb5b 100644 --- a/core/src/test/java/org/elasticsearch/search/query/SimpleQueryStringIT.java +++ 
b/core/src/test/java/org/elasticsearch/search/query/SimpleQueryStringIT.java @@ -24,7 +24,7 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.query.BoolQueryBuilder; -import org.elasticsearch.index.query.SimpleQueryStringBuilder; +import org.elasticsearch.index.query.Operator; import org.elasticsearch.index.query.SimpleQueryStringFlag; import org.elasticsearch.test.ESIntegTestCase; import org.junit.Test; @@ -34,10 +34,7 @@ import java.util.Locale; import java.util.concurrent.ExecutionException; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.index.query.QueryBuilders.boolQuery; -import static org.elasticsearch.index.query.QueryBuilders.queryStringQuery; -import static org.elasticsearch.index.query.QueryBuilders.simpleQueryStringQuery; -import static org.elasticsearch.index.query.QueryBuilders.termQuery; +import static org.elasticsearch.index.query.QueryBuilders.*; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*; import static org.hamcrest.Matchers.equalTo; @@ -71,7 +68,7 @@ public class SimpleQueryStringIT extends ESIntegTestCase { assertFirstHit(searchResponse, hasId("3")); searchResponse = client().prepareSearch().setQuery( - simpleQueryStringQuery("foo bar").defaultOperator(SimpleQueryStringBuilder.Operator.AND)).get(); + simpleQueryStringQuery("foo bar").defaultOperator(Operator.AND)).get(); assertHitCount(searchResponse, 1l); assertFirstHit(searchResponse, hasId("3")); @@ -252,21 +249,21 @@ public class SimpleQueryStringIT extends ESIntegTestCase { searchResponse = client().prepareSearch().setQuery( simpleQueryStringQuery("foo | bar") - .defaultOperator(SimpleQueryStringBuilder.Operator.AND) + .defaultOperator(Operator.AND) .flags(SimpleQueryStringFlag.OR)).get(); assertHitCount(searchResponse, 3l); assertSearchHits(searchResponse, 
"1", "2", "3"); searchResponse = client().prepareSearch().setQuery( simpleQueryStringQuery("foo | bar") - .defaultOperator(SimpleQueryStringBuilder.Operator.AND) + .defaultOperator(Operator.AND) .flags(SimpleQueryStringFlag.NONE)).get(); assertHitCount(searchResponse, 1l); assertFirstHit(searchResponse, hasId("3")); searchResponse = client().prepareSearch().setQuery( simpleQueryStringQuery("baz | egg*") - .defaultOperator(SimpleQueryStringBuilder.Operator.AND) + .defaultOperator(Operator.AND) .flags(SimpleQueryStringFlag.NONE)).get(); assertHitCount(searchResponse, 0l); @@ -283,7 +280,7 @@ public class SimpleQueryStringIT extends ESIntegTestCase { searchResponse = client().prepareSearch().setQuery( simpleQueryStringQuery("baz | egg*") - .defaultOperator(SimpleQueryStringBuilder.Operator.AND) + .defaultOperator(Operator.AND) .flags(SimpleQueryStringFlag.WHITESPACE, SimpleQueryStringFlag.PREFIX)).get(); assertHitCount(searchResponse, 1l); assertFirstHit(searchResponse, hasId("4")); diff --git a/core/src/test/java/org/elasticsearch/search/rescore/QueryRescorerIT.java b/core/src/test/java/org/elasticsearch/search/rescore/QueryRescorerIT.java index 6aa31ca2773..5a22afe4577 100644 --- a/core/src/test/java/org/elasticsearch/search/rescore/QueryRescorerIT.java +++ b/core/src/test/java/org/elasticsearch/search/rescore/QueryRescorerIT.java @@ -28,11 +28,11 @@ import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchType; import org.elasticsearch.common.lucene.search.function.CombineFunction; -import org.elasticsearch.common.settings.Settings.Builder; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.Settings.Builder; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.index.query.MatchQueryBuilder; +import 
org.elasticsearch.index.query.Operator; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders; import org.elasticsearch.script.Script; @@ -45,22 +45,11 @@ import org.junit.Test; import java.util.Arrays; import java.util.Comparator; -import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; +import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFirstHit; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFourthHit; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSecondHit; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertThirdHit; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasId; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasScore; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.lessThanOrEqualTo; -import static org.hamcrest.Matchers.notNullValue; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*; +import static org.hamcrest.Matchers.*; /** * @@ -83,9 +72,9 @@ public class QueryRescorerIT extends ESIntegTestCase { SearchResponse searchResponse = client().prepareSearch() .setQuery(QueryBuilders.matchAllQuery()) .setRescorer(RescoreBuilder.queryRescorer( 
- QueryBuilders.functionScoreQuery(QueryBuilders.matchAllQuery()) - .boostMode("replace").add(ScoreFunctionBuilders.weightFactorFunction(100))).setQueryWeight(0.0f).setRescoreQueryWeight(1.0f)) - .setRescoreWindow(1).setSize(randomIntBetween(2,10)).execute().actionGet(); + QueryBuilders.functionScoreQuery(QueryBuilders.matchAllQuery(), + ScoreFunctionBuilders.weightFactorFunction(100)).boostMode(CombineFunction.REPLACE)).setQueryWeight(0.0f).setRescoreQueryWeight(1.0f)) + .setRescoreWindow(1).setSize(randomIntBetween(2, 10)).execute().actionGet(); assertSearchResponse(searchResponse); assertFirstHit(searchResponse, hasScore(100.f)); int numDocsWith100AsAScore = 0; @@ -116,7 +105,7 @@ public class QueryRescorerIT extends ESIntegTestCase { ensureYellow(); refresh(); SearchResponse searchResponse = client().prepareSearch() - .setQuery(QueryBuilders.matchQuery("field1", "the quick brown").operator(MatchQueryBuilder.Operator.OR)) + .setQuery(QueryBuilders.matchQuery("field1", "the quick brown").operator(Operator.OR)) .setRescorer(RescoreBuilder.queryRescorer(QueryBuilders.matchPhraseQuery("field1", "quick brown").slop(2).boost(4.0f)).setRescoreQueryWeight(2)) .setRescoreWindow(5).execute().actionGet(); @@ -126,7 +115,7 @@ public class QueryRescorerIT extends ESIntegTestCase { assertThat(searchResponse.getHits().getHits()[2].getId(), equalTo("2")); searchResponse = client().prepareSearch() - .setQuery(QueryBuilders.matchQuery("field1", "the quick brown").operator(MatchQueryBuilder.Operator.OR)) + .setQuery(QueryBuilders.matchQuery("field1", "the quick brown").operator(Operator.OR)) .setRescorer(RescoreBuilder.queryRescorer(QueryBuilders.matchPhraseQuery("field1", "the quick brown").slop(3))) .setRescoreWindow(5).execute().actionGet(); @@ -136,7 +125,7 @@ public class QueryRescorerIT extends ESIntegTestCase { assertThirdHit(searchResponse, hasId("3")); searchResponse = client().prepareSearch() - .setQuery(QueryBuilders.matchQuery("field1", "the quick 
brown").operator(MatchQueryBuilder.Operator.OR)) + .setQuery(QueryBuilders.matchQuery("field1", "the quick brown").operator(Operator.OR)) .setRescorer(RescoreBuilder.queryRescorer((QueryBuilders.matchPhraseQuery("field1", "the quick brown")))) .setRescoreWindow(5).execute().actionGet(); @@ -179,7 +168,7 @@ public class QueryRescorerIT extends ESIntegTestCase { client().admin().indices().prepareRefresh("test").execute().actionGet(); SearchResponse searchResponse = client() .prepareSearch() - .setQuery(QueryBuilders.matchQuery("field1", "lexington avenue massachusetts").operator(MatchQueryBuilder.Operator.OR)) + .setQuery(QueryBuilders.matchQuery("field1", "lexington avenue massachusetts").operator(Operator.OR)) .setFrom(0) .setSize(5) .setRescorer( @@ -194,7 +183,7 @@ public class QueryRescorerIT extends ESIntegTestCase { searchResponse = client() .prepareSearch() - .setQuery(QueryBuilders.matchQuery("field1", "lexington avenue massachusetts").operator(MatchQueryBuilder.Operator.OR)) + .setQuery(QueryBuilders.matchQuery("field1", "lexington avenue massachusetts").operator(Operator.OR)) .setFrom(0) .setSize(5) .setSearchType(SearchType.DFS_QUERY_THEN_FETCH) @@ -211,7 +200,7 @@ public class QueryRescorerIT extends ESIntegTestCase { // Make sure non-zero from works: searchResponse = client() .prepareSearch() - .setQuery(QueryBuilders.matchQuery("field1", "lexington avenue massachusetts").operator(MatchQueryBuilder.Operator.OR)) + .setQuery(QueryBuilders.matchQuery("field1", "lexington avenue massachusetts").operator(Operator.OR)) .setFrom(2) .setSize(5) .setSearchType(SearchType.DFS_QUERY_THEN_FETCH) @@ -320,7 +309,7 @@ public class QueryRescorerIT extends ESIntegTestCase { SearchResponse searchResponse = client() .prepareSearch() - .setQuery(QueryBuilders.matchQuery("field1", "massachusetts").operator(MatchQueryBuilder.Operator.OR)) + .setQuery(QueryBuilders.matchQuery("field1", "massachusetts").operator(Operator.OR)) .setFrom(0) .setSize(5).execute().actionGet(); 
assertThat(searchResponse.getHits().hits().length, equalTo(4)); @@ -333,7 +322,7 @@ public class QueryRescorerIT extends ESIntegTestCase { // Now, penalizing rescore (nothing matches the rescore query): searchResponse = client() .prepareSearch() - .setQuery(QueryBuilders.matchQuery("field1", "massachusetts").operator(MatchQueryBuilder.Operator.OR)) + .setQuery(QueryBuilders.matchQuery("field1", "massachusetts").operator(Operator.OR)) .setFrom(0) .setSize(5) .setRescorer( @@ -425,7 +414,7 @@ public class QueryRescorerIT extends ESIntegTestCase { .prepareSearch() .setSearchType(SearchType.QUERY_THEN_FETCH) .setPreference("test") // ensure we hit the same shards for tie-breaking - .setQuery(QueryBuilders.matchQuery("field1", query).operator(MatchQueryBuilder.Operator.OR)) + .setQuery(QueryBuilders.matchQuery("field1", query).operator(Operator.OR)) .setFrom(0) .setSize(resultSize) .setRescorer( @@ -440,7 +429,7 @@ public class QueryRescorerIT extends ESIntegTestCase { SearchResponse plain = client().prepareSearch() .setSearchType(SearchType.QUERY_THEN_FETCH) .setPreference("test") // ensure we hit the same shards for tie-breaking - .setQuery(QueryBuilders.matchQuery("field1", query).operator(MatchQueryBuilder.Operator.OR)).setFrom(0).setSize(resultSize) + .setQuery(QueryBuilders.matchQuery("field1", query).operator(Operator.OR)).setFrom(0).setSize(resultSize) .execute().actionGet(); // check equivalence @@ -450,7 +439,7 @@ public class QueryRescorerIT extends ESIntegTestCase { .prepareSearch() .setSearchType(SearchType.QUERY_THEN_FETCH) .setPreference("test") // ensure we hit the same shards for tie-breaking - .setQuery(QueryBuilders.matchQuery("field1", query).operator(MatchQueryBuilder.Operator.OR)) + .setQuery(QueryBuilders.matchQuery("field1", query).operator(Operator.OR)) .setFrom(0) .setSize(resultSize) .setRescorer( @@ -468,7 +457,7 @@ public class QueryRescorerIT extends ESIntegTestCase { .prepareSearch() .setSearchType(SearchType.QUERY_THEN_FETCH) 
.setPreference("test") // ensure we hit the same shards for tie-breaking - .setQuery(QueryBuilders.matchQuery("field1", query).operator(MatchQueryBuilder.Operator.OR)) + .setQuery(QueryBuilders.matchQuery("field1", query).operator(Operator.OR)) .setFrom(0) .setSize(resultSize) .setRescorer( @@ -503,7 +492,7 @@ public class QueryRescorerIT extends ESIntegTestCase { SearchResponse searchResponse = client() .prepareSearch() .setSearchType(SearchType.DFS_QUERY_THEN_FETCH) - .setQuery(QueryBuilders.matchQuery("field1", "the quick brown").operator(MatchQueryBuilder.Operator.OR)) + .setQuery(QueryBuilders.matchQuery("field1", "the quick brown").operator(Operator.OR)) .setRescorer( RescoreBuilder.queryRescorer(QueryBuilders.matchPhraseQuery("field1", "the quick brown").slop(2).boost(4.0f)) .setQueryWeight(0.5f).setRescoreQueryWeight(0.4f)).setRescoreWindow(5).setExplain(true).execute() @@ -541,7 +530,7 @@ public class QueryRescorerIT extends ESIntegTestCase { SearchResponse searchResponse = client() .prepareSearch() .setSearchType(SearchType.DFS_QUERY_THEN_FETCH) - .setQuery(QueryBuilders.matchQuery("field1", "the quick brown").operator(MatchQueryBuilder.Operator.OR)) + .setQuery(QueryBuilders.matchQuery("field1", "the quick brown").operator(Operator.OR)) .setRescorer(innerRescoreQuery).setRescoreWindow(5).setExplain(true).execute() .actionGet(); assertHitCount(searchResponse, 3); @@ -564,7 +553,7 @@ public class QueryRescorerIT extends ESIntegTestCase { searchResponse = client() .prepareSearch() .setSearchType(SearchType.DFS_QUERY_THEN_FETCH) - .setQuery(QueryBuilders.matchQuery("field1", "the quick brown").operator(MatchQueryBuilder.Operator.OR)) + .setQuery(QueryBuilders.matchQuery("field1", "the quick brown").operator(Operator.OR)) .addRescorer(innerRescoreQuery).setRescoreWindow(5) .addRescorer(outerRescoreQuery).setRescoreWindow(10) .setExplain(true).get(); @@ -598,15 +587,12 @@ public class QueryRescorerIT extends ESIntegTestCase { .queryRescorer( 
QueryBuilders.boolQuery() .disableCoord(true) - .should(QueryBuilders.functionScoreQuery(QueryBuilders.termQuery("field1", intToEnglish[0])) - .boostMode(CombineFunction.REPLACE) - .add(ScoreFunctionBuilders.scriptFunction(new Script("5.0f")))) - .should(QueryBuilders.functionScoreQuery(QueryBuilders.termQuery("field1", intToEnglish[1])) - .boostMode(CombineFunction.REPLACE) - .add(ScoreFunctionBuilders.scriptFunction(new Script("7.0f")))) - .should(QueryBuilders.functionScoreQuery(QueryBuilders.termQuery("field1", intToEnglish[3])) - .boostMode(CombineFunction.REPLACE) - .add(ScoreFunctionBuilders.scriptFunction(new Script("0.0f"))))) + .should(QueryBuilders.functionScoreQuery(QueryBuilders.termQuery("field1", intToEnglish[0]), + ScoreFunctionBuilders.scriptFunction(new Script("5.0f"))).boostMode(CombineFunction.REPLACE)) + .should(QueryBuilders.functionScoreQuery(QueryBuilders.termQuery("field1", intToEnglish[1]), + ScoreFunctionBuilders.scriptFunction(new Script("7.0f"))).boostMode(CombineFunction.REPLACE)) + .should(QueryBuilders.functionScoreQuery(QueryBuilders.termQuery("field1", intToEnglish[3]), + ScoreFunctionBuilders.scriptFunction(new Script("0.0f"))).boostMode(CombineFunction.REPLACE))) .setQueryWeight(primaryWeight) .setRescoreQueryWeight(secondaryWeight); @@ -619,22 +605,18 @@ public class QueryRescorerIT extends ESIntegTestCase { .setPreference("test") // ensure we hit the same shards for tie-breaking .setQuery(QueryBuilders.boolQuery() .disableCoord(true) - .should(QueryBuilders.functionScoreQuery(QueryBuilders.termQuery("field1", intToEnglish[0])) - .boostMode(CombineFunction.REPLACE) - .add(ScoreFunctionBuilders.scriptFunction(new Script("2.0f")))) - .should(QueryBuilders.functionScoreQuery(QueryBuilders.termQuery("field1", intToEnglish[1])) - .boostMode(CombineFunction.REPLACE) - .add(ScoreFunctionBuilders.scriptFunction(new Script("3.0f")))) - .should(QueryBuilders.functionScoreQuery(QueryBuilders.termQuery("field1", intToEnglish[2])) - 
.boostMode(CombineFunction.REPLACE) - .add(ScoreFunctionBuilders.scriptFunction(new Script("5.0f")))) - .should(QueryBuilders.functionScoreQuery(QueryBuilders.termQuery("field1", intToEnglish[3])) - .boostMode(CombineFunction.REPLACE) - .add(ScoreFunctionBuilders.scriptFunction(new Script("0.2f"))))) - .setFrom(0) - .setSize(10) - .setRescorer(rescoreQuery) - .setRescoreWindow(50).execute().actionGet(); + .should(QueryBuilders.functionScoreQuery(QueryBuilders.termQuery("field1", intToEnglish[0]), + ScoreFunctionBuilders.scriptFunction(new Script("2.0f"))).boostMode(CombineFunction.REPLACE)) + .should(QueryBuilders.functionScoreQuery(QueryBuilders.termQuery("field1", intToEnglish[1]), + ScoreFunctionBuilders.scriptFunction(new Script("3.0f"))).boostMode(CombineFunction.REPLACE)) + .should(QueryBuilders.functionScoreQuery(QueryBuilders.termQuery("field1", intToEnglish[2]), + ScoreFunctionBuilders.scriptFunction(new Script("5.0f"))).boostMode(CombineFunction.REPLACE)) + .should(QueryBuilders.functionScoreQuery(QueryBuilders.termQuery("field1", intToEnglish[3]), + ScoreFunctionBuilders.scriptFunction(new Script("0.2f"))).boostMode(CombineFunction.REPLACE))) + .setFrom(0) + .setSize(10) + .setRescorer(rescoreQuery) + .setRescoreWindow(50).execute().actionGet(); assertHitCount(rescored, 4); @@ -687,12 +669,11 @@ public class QueryRescorerIT extends ESIntegTestCase { public void testMultipleRescores() throws Exception { int numDocs = indexRandomNumbers("keyword", 1, true); QueryRescorer eightIsGreat = RescoreBuilder.queryRescorer( - QueryBuilders.functionScoreQuery(QueryBuilders.termQuery("field1", English.intToEnglish(8))).boostMode(CombineFunction.REPLACE) -.add(ScoreFunctionBuilders.scriptFunction(new Script("1000.0f")))).setScoreMode( - "total"); + QueryBuilders.functionScoreQuery(QueryBuilders.termQuery("field1", English.intToEnglish(8)), + ScoreFunctionBuilders.scriptFunction(new Script("1000.0f"))).boostMode(CombineFunction.REPLACE)).setScoreMode("total"); 
QueryRescorer sevenIsBetter = RescoreBuilder.queryRescorer( - QueryBuilders.functionScoreQuery(QueryBuilders.termQuery("field1", English.intToEnglish(7))).boostMode(CombineFunction.REPLACE) -.add(ScoreFunctionBuilders.scriptFunction(new Script("10000.0f")))) + QueryBuilders.functionScoreQuery(QueryBuilders.termQuery("field1", English.intToEnglish(7)), + ScoreFunctionBuilders.scriptFunction(new Script("10000.0f"))).boostMode(CombineFunction.REPLACE)) .setScoreMode("total"); // First set the rescore window large enough that both rescores take effect @@ -709,11 +690,11 @@ public class QueryRescorerIT extends ESIntegTestCase { // Now use one rescore to drag the number we're looking for into the window of another QueryRescorer ninetyIsGood = RescoreBuilder.queryRescorer( - QueryBuilders.functionScoreQuery(QueryBuilders.queryStringQuery("*ninety*")).boostMode(CombineFunction.REPLACE) - .add(ScoreFunctionBuilders.scriptFunction(new Script("1000.0f")))).setScoreMode("total"); + QueryBuilders.functionScoreQuery(QueryBuilders.queryStringQuery("*ninety*"), ScoreFunctionBuilders.scriptFunction(new Script("1000.0f"))) + .boostMode(CombineFunction.REPLACE)).setScoreMode("total"); QueryRescorer oneToo = RescoreBuilder.queryRescorer( - QueryBuilders.functionScoreQuery(QueryBuilders.queryStringQuery("*one*")).boostMode(CombineFunction.REPLACE) - .add(ScoreFunctionBuilders.scriptFunction(new Script("1000.0f")))).setScoreMode("total"); + QueryBuilders.functionScoreQuery(QueryBuilders.queryStringQuery("*one*"), ScoreFunctionBuilders.scriptFunction(new Script("1000.0f"))) + .boostMode(CombineFunction.REPLACE)).setScoreMode("total"); request.clearRescorers().addRescorer(ninetyIsGood).addRescorer(oneToo, 10); response = request.setSize(2).get(); assertFirstHit(response, hasId("91")); diff --git a/core/src/test/java/org/elasticsearch/test/ESTestCase.java b/core/src/test/java/org/elasticsearch/test/ESTestCase.java index a1c511dc2a2..8bbd978f226 100644 --- 
a/core/src/test/java/org/elasticsearch/test/ESTestCase.java +++ b/core/src/test/java/org/elasticsearch/test/ESTestCase.java @@ -225,7 +225,7 @@ public abstract class ESTestCase extends LuceneTestCase { } // ----------------------------------------------------------------- - // Test facilities and facades for subclasses. + // Test facilities and facades for subclasses. // ----------------------------------------------------------------- // TODO: replaces uses of getRandom() with random() @@ -305,6 +305,35 @@ public abstract class ESTestCase extends LuceneTestCase { return random().nextDouble(); } + /** + * Returns a double value in the interval [start, end) if lowerInclusive is + * set to true, (start, end) otherwise. + * + * @param start lower bound of interval to draw uniformly distributed random numbers from + * @param end upper bound + * @param lowerInclusive whether or not to include lower end of the interval + * */ + public static double randomDoubleBetween(double start, double end, boolean lowerInclusive) { + double result = 0.0; + + if (start == -Double.MAX_VALUE || end == Double.MAX_VALUE) { + // formula below does not work with very large doubles + result = Double.longBitsToDouble(randomLong()); + while (result < start || result > end || Double.isNaN(result)) { + result = Double.longBitsToDouble(randomLong()); + } + } else { + result = randomDouble(); + if (lowerInclusive == false) { + while (result <= 0.0) { + result = randomDouble(); + } + } + result = result * end + (1.0 - result) * start; + } + return result; + } + public static long randomLong() { return random().nextLong(); } @@ -364,17 +393,27 @@ public abstract class ESTestCase extends LuceneTestCase { return RandomizedTest.randomRealisticUnicodeOfCodepointLength(codePoints); } - public static String[] generateRandomStringArray(int maxArraySize, int maxStringSize, boolean allowNull) { + public static String[] generateRandomStringArray(int maxArraySize, int maxStringSize, boolean allowNull, boolean 
allowEmpty) { if (allowNull && random().nextBoolean()) { return null; } - String[] array = new String[random().nextInt(maxArraySize)]; // allow empty arrays - for (int i = 0; i < array.length; i++) { + int arraySize = randomIntBetween(allowEmpty ? 0 : 1, maxArraySize); + String[] array = new String[arraySize]; + for (int i = 0; i < arraySize; i++) { array[i] = RandomStrings.randomAsciiOfLength(random(), maxStringSize); } return array; } + public static String[] generateRandomStringArray(int maxArraySize, int maxStringSize, boolean allowNull) { + return generateRandomStringArray(maxArraySize, maxStringSize, allowNull, true); + } + + public static String randomTimeValue() { + final String[] values = new String[]{"d", "H", "ms", "s", "S", "w"}; + return randomIntBetween(0, 1000) + randomFrom(values); + } + /** * Runs the code block for 10 seconds waiting for no assertion to trip. */ @@ -471,7 +510,7 @@ public abstract class ESTestCase extends LuceneTestCase { */ @Override public Path getDataPath(String relativePath) { - // we override LTC behavior here: wrap even resources with mockfilesystems, + // we override LTC behavior here: wrap even resources with mockfilesystems, // because some code is buggy when it comes to multiple nio.2 filesystems // (e.g. 
FileSystemUtils, and likely some tests) try { diff --git a/core/src/test/java/org/elasticsearch/test/geo/RandomShapeGenerator.java b/core/src/test/java/org/elasticsearch/test/geo/RandomShapeGenerator.java index 7144ab71c22..2bf231e203f 100644 --- a/core/src/test/java/org/elasticsearch/test/geo/RandomShapeGenerator.java +++ b/core/src/test/java/org/elasticsearch/test/geo/RandomShapeGenerator.java @@ -242,7 +242,7 @@ public class RandomShapeGenerator { } } - protected static Point xRandomPoint(Random r) { + public static Point xRandomPoint(Random r) { return xRandomPointIn(r, ctx.getWorldBounds()); } @@ -256,7 +256,7 @@ public class RandomShapeGenerator { return p; } - protected static Rectangle xRandomRectangle(Random r, Point nearP) { + public static Rectangle xRandomRectangle(Random r, Point nearP) { Rectangle bounds = ctx.getWorldBounds(); if (nearP == null) nearP = xRandomPointIn(r, bounds); diff --git a/core/src/test/java/org/elasticsearch/test/transport/AssertingLocalTransport.java b/core/src/test/java/org/elasticsearch/test/transport/AssertingLocalTransport.java index d9b9b491c9d..c253a752d6b 100644 --- a/core/src/test/java/org/elasticsearch/test/transport/AssertingLocalTransport.java +++ b/core/src/test/java/org/elasticsearch/test/transport/AssertingLocalTransport.java @@ -80,7 +80,7 @@ public class AssertingLocalTransport extends LocalTransport { ElasticsearchAssertions.assertVersionSerializable(VersionUtils.randomVersionBetween(random, minVersion, maxVersion), response); super.handleParsedResponse(response, handler); } - + @Override public void sendRequest(final DiscoveryNode node, final long requestId, final String action, final TransportRequest request, TransportRequestOptions options) throws IOException, TransportException { ElasticsearchAssertions.assertVersionSerializable(VersionUtils.randomVersionBetween(random, minVersion, maxVersion), request); diff --git a/core/src/test/java/org/elasticsearch/transport/ContextAndHeaderTransportIT.java 
b/core/src/test/java/org/elasticsearch/transport/ContextAndHeaderTransportIT.java index 2810c09651d..abd0a063c90 100644 --- a/core/src/test/java/org/elasticsearch/transport/ContextAndHeaderTransportIT.java +++ b/core/src/test/java/org/elasticsearch/transport/ContextAndHeaderTransportIT.java @@ -54,11 +54,11 @@ import org.elasticsearch.index.query.GeoShapeQueryBuilder; import org.elasticsearch.index.query.MoreLikeThisQueryBuilder; import org.elasticsearch.index.query.MoreLikeThisQueryBuilder.Item; import org.elasticsearch.index.query.QueryBuilders; -import org.elasticsearch.index.query.TermsLookupQueryBuilder; +import org.elasticsearch.index.query.TermsQueryBuilder; +import org.elasticsearch.indices.cache.query.terms.TermsLookup; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.rest.RestController; import org.elasticsearch.script.Script; -import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.ScriptService.ScriptType; import org.elasticsearch.script.Template; import org.elasticsearch.script.groovy.GroovyScriptEngineService; @@ -161,7 +161,7 @@ public class ContextAndHeaderTransportIT extends ESIntegTestCase { .setSource(jsonBuilder().startObject().field("username", "foo").endObject()).get(); transportClient().admin().indices().prepareRefresh(queryIndex, lookupIndex).get(); - TermsLookupQueryBuilder termsLookupFilterBuilder = QueryBuilders.termsLookupQuery("username").lookupIndex(lookupIndex).lookupType("type").lookupId("1").lookupPath("followers"); + TermsQueryBuilder termsLookupFilterBuilder = QueryBuilders.termsLookupQuery("username", new TermsLookup(lookupIndex, "type", "1", "followers")); BoolQueryBuilder queryBuilder = QueryBuilders.boolQuery().must(QueryBuilders.matchAllQuery()).must(termsLookupFilterBuilder); SearchResponse searchResponse = transportClient() @@ -229,8 +229,8 @@ public class ContextAndHeaderTransportIT extends ESIntegTestCase { .get(); transportClient().admin().indices().prepareRefresh(lookupIndex, 
queryIndex).get(); - MoreLikeThisQueryBuilder moreLikeThisQueryBuilder = QueryBuilders.moreLikeThisQuery("name") - .addLikeItem(new Item(lookupIndex, "type", "1")) + MoreLikeThisQueryBuilder moreLikeThisQueryBuilder = QueryBuilders.moreLikeThisQuery(new String[] {"name"}, null, + new Item[] {new Item(lookupIndex, "type", "1")}) .minTermFreq(1) .minDocFreq(1); diff --git a/core/src/test/java/org/elasticsearch/validate/SimpleValidateQueryIT.java b/core/src/test/java/org/elasticsearch/validate/SimpleValidateQueryIT.java index 044bf30cd3c..22c959f5cd0 100644 --- a/core/src/test/java/org/elasticsearch/validate/SimpleValidateQueryIT.java +++ b/core/src/test/java/org/elasticsearch/validate/SimpleValidateQueryIT.java @@ -19,7 +19,6 @@ package org.elasticsearch.validate; import java.nio.charset.StandardCharsets; - import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryResponse; import org.elasticsearch.client.Client; @@ -28,6 +27,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.IndexNotFoundException; +import org.elasticsearch.index.query.MoreLikeThisQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.test.ESIntegTestCase; @@ -236,7 +236,7 @@ public class SimpleValidateQueryIT extends ESIntegTestCase { containsString("+field:pidgin (field:huge field:brown)"), true); assertExplanation(QueryBuilders.commonTermsQuery("field", "the brown").analyzer("stop"), containsString("field:brown"), true); - + // match queries with cutoff frequency assertExplanation(QueryBuilders.matchQuery("field", "huge brown pidgin").cutoffFrequency(1), containsString("+field:pidgin (field:huge field:brown)"), true); @@ -250,10 +250,10 @@ public class SimpleValidateQueryIT extends ESIntegTestCase 
{ containsString("field:jumps^0.75"), true); // more like this queries - assertExplanation(QueryBuilders.moreLikeThisQuery("field").ids("1") + assertExplanation(QueryBuilders.moreLikeThisQuery(new String[] { "field" }, null, MoreLikeThisQueryBuilder.ids("1")) .include(true).minTermFreq(1).minDocFreq(1).maxQueryTerms(2), containsString("field:huge field:pidgin"), true); - assertExplanation(QueryBuilders.moreLikeThisQuery("field").like("the huge pidgin") + assertExplanation(QueryBuilders.moreLikeThisQuery(new String[] { "field" }, new String[] {"the huge pidgin"}, null) .minTermFreq(1).minDocFreq(1).maxQueryTerms(2), containsString("field:huge field:pidgin"), true); } @@ -276,11 +276,7 @@ public class SimpleValidateQueryIT extends ESIntegTestCase { assertThat(client().admin().indices().prepareValidateQuery("test").setSource(new BytesArray("{\"query\": {\"term\" : { \"user\" : \"kimchy\" }}, \"foo\": \"bar\"}")).get().isValid(), equalTo(false)); } - private void assertExplanation(QueryBuilder queryBuilder, Matcher matcher) { - assertExplanation(queryBuilder, matcher, false); - } - - private void assertExplanation(QueryBuilder queryBuilder, Matcher matcher, boolean withRewrite) { + private static void assertExplanation(QueryBuilder queryBuilder, Matcher matcher, boolean withRewrite) { ValidateQueryResponse response = client().admin().indices().prepareValidateQuery("test") .setTypes("type1") .setQuery(queryBuilder) diff --git a/core/src/test/resources/org/elasticsearch/index/query/bool-filter.json b/core/src/test/resources/org/elasticsearch/index/query/bool-filter.json deleted file mode 100644 index 484e517fd47..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/bool-filter.json +++ /dev/null @@ -1,35 +0,0 @@ -{ - filtered:{ - query:{ - term:{ - "name.first":"shay" - } - }, - filter:{ - bool:{ - must:[ - { - term:{ - "name.first":"shay1" - } - }, - { - term:{ - "name.first":"shay4" - } - } - ], - must_not:{ - term:{ - "name.first":"shay2" - } - }, - 
should:{ - term:{ - "name.first":"shay3" - } - } - } - } - } -} \ No newline at end of file diff --git a/core/src/test/resources/org/elasticsearch/index/query/bool.json b/core/src/test/resources/org/elasticsearch/index/query/bool.json deleted file mode 100644 index 1619fcf48c8..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/bool.json +++ /dev/null @@ -1,30 +0,0 @@ -{ - bool:{ - must:[ - { - query_string:{ - default_field:"content", - query:"test1" - } - }, - { - query_string:{ - default_field:"content", - query:"test4" - } - } - ], - must_not:{ - query_string:{ - default_field:"content", - query:"test2" - } - }, - should:{ - query_string:{ - default_field:"content", - query:"test3" - } - } - } -} \ No newline at end of file diff --git a/core/src/test/resources/org/elasticsearch/index/query/boosting-query.json b/core/src/test/resources/org/elasticsearch/index/query/boosting-query.json deleted file mode 100644 index 87b6e6d158c..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/boosting-query.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "boosting":{ - "positive":{ - "term":{ - "field1":"value1" - } - }, - "negative":{ - "term":{ - "field2":"value2" - } - }, - "negative_boost":0.2 - } -} \ No newline at end of file diff --git a/core/src/test/resources/org/elasticsearch/index/query/child-mapping.json b/core/src/test/resources/org/elasticsearch/index/query/child-mapping.json deleted file mode 100644 index 6f3b6e5819e..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/child-mapping.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "child":{ - "properties":{ - "field":{ - "type":"string" - } - }, - "_parent" : { - "type" : "person" - } - } -} \ No newline at end of file diff --git a/core/src/test/resources/org/elasticsearch/index/query/constantScore-query.json b/core/src/test/resources/org/elasticsearch/index/query/constantScore-query.json deleted file mode 100644 index bf59bc5b472..00000000000 --- 
a/core/src/test/resources/org/elasticsearch/index/query/constantScore-query.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - constant_score:{ - filter:{ - term:{ - "name.last":"banon" - } - } - } -} \ No newline at end of file diff --git a/core/src/test/resources/org/elasticsearch/index/query/data.json b/core/src/test/resources/org/elasticsearch/index/query/data.json deleted file mode 100644 index 79f139f84ba..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/data.json +++ /dev/null @@ -1,43 +0,0 @@ -{ - name:{ - first:"shay", - last:"banon" - }, - address:{ - first:{ - location:"first location" - }, - last:{ - location:"last location" - } - }, - age:32, - birthDate:"1977-11-15", - nerd:true, - dogs:["buck", "mia"], - complex:[ - { - value1:"value1" - }, - { - value2:"value2" - } - ], - complex2:[ - [ - { - value1:"value1" - } - ], - [ - { - value2:"value2" - } - ] - ], - nullValue:null, - "location":{ - "lat":1.1, - "lon":1.2 - } -} \ No newline at end of file diff --git a/core/src/test/resources/org/elasticsearch/index/query/date_range_filter_format.json b/core/src/test/resources/org/elasticsearch/index/query/date_range_filter_format.json deleted file mode 100644 index 94596788a23..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/date_range_filter_format.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "constant_score": { - "filter": { - "range" : { - "born" : { - "gte": "01/01/2012", - "lt": "2030", - "format": "dd/MM/yyyy||yyyy" - } - } - } - } -} diff --git a/core/src/test/resources/org/elasticsearch/index/query/date_range_filter_format_invalid.json b/core/src/test/resources/org/elasticsearch/index/query/date_range_filter_format_invalid.json deleted file mode 100644 index 7b5c2724429..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/date_range_filter_format_invalid.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "constant_score": { - "filter": { - "range" : { - "born" : { - "gte": "01/01/2012", - "lt": "2030", - 
"format": "yyyy" - } - } - } - } -} diff --git a/core/src/test/resources/org/elasticsearch/index/query/date_range_filter_timezone.json b/core/src/test/resources/org/elasticsearch/index/query/date_range_filter_timezone.json deleted file mode 100644 index 158550afbe2..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/date_range_filter_timezone.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "constant_score": { - "filter": { - "range" : { - "born" : { - "gte": "2012-01-01", - "lte": "now", - "time_zone": "+01:00" - } - } - } - } -} diff --git a/core/src/test/resources/org/elasticsearch/index/query/date_range_filter_timezone_numeric_field.json b/core/src/test/resources/org/elasticsearch/index/query/date_range_filter_timezone_numeric_field.json deleted file mode 100644 index 6e0719475b9..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/date_range_filter_timezone_numeric_field.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "constant_score": { - "filter": { - "range" : { - "age" : { - "gte": "0", - "lte": "100", - "time_zone": "-01:00" - } - } - } - } -} diff --git a/core/src/test/resources/org/elasticsearch/index/query/date_range_query_boundaries_exclusive.json b/core/src/test/resources/org/elasticsearch/index/query/date_range_query_boundaries_exclusive.json deleted file mode 100644 index 30fe50a1299..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/date_range_query_boundaries_exclusive.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "range" : { - "born" : { - "gt": "2014-11-05||/M", - "lt": "2014-12-08||/d" - } - } -} diff --git a/core/src/test/resources/org/elasticsearch/index/query/date_range_query_boundaries_inclusive.json b/core/src/test/resources/org/elasticsearch/index/query/date_range_query_boundaries_inclusive.json deleted file mode 100644 index 3f3aab0f6ca..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/date_range_query_boundaries_inclusive.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "range" : 
{ - "born" : { - "gte": "2014-11-05||/M", - "lte": "2014-12-08||/d" - } - } -} diff --git a/core/src/test/resources/org/elasticsearch/index/query/date_range_query_format.json b/core/src/test/resources/org/elasticsearch/index/query/date_range_query_format.json deleted file mode 100644 index f679dc9696f..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/date_range_query_format.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "range" : { - "born" : { - "gte": "01/01/2012", - "lt": "2030", - "format": "dd/MM/yyyy||yyyy" - } - } -} diff --git a/core/src/test/resources/org/elasticsearch/index/query/date_range_query_format_invalid.json b/core/src/test/resources/org/elasticsearch/index/query/date_range_query_format_invalid.json deleted file mode 100644 index 307e9775e50..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/date_range_query_format_invalid.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "range" : { - "born" : { - "gte": "01/01/2012", - "lt": "2030", - "format": "yyyy" - } - } -} diff --git a/core/src/test/resources/org/elasticsearch/index/query/date_range_query_timezone.json b/core/src/test/resources/org/elasticsearch/index/query/date_range_query_timezone.json deleted file mode 100644 index 0cabb1511ac..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/date_range_query_timezone.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "range" : { - "born" : { - "gte": "2012-01-01", - "lte": "now", - "time_zone": "+01:00" - } - } -} diff --git a/core/src/test/resources/org/elasticsearch/index/query/date_range_query_timezone_numeric_field.json b/core/src/test/resources/org/elasticsearch/index/query/date_range_query_timezone_numeric_field.json deleted file mode 100644 index b7526a2c294..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/date_range_query_timezone_numeric_field.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "range" : { - "age" : { - "gte": "0", - "lte": "100", - "time_zone": "-01:00" - } - } -} diff --git 
a/core/src/test/resources/org/elasticsearch/index/query/disMax.json b/core/src/test/resources/org/elasticsearch/index/query/disMax.json deleted file mode 100644 index 99da2df0251..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/disMax.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - dis_max:{ - tie_breaker:0.7, - boost:1.2, - queries:[ - { - term:{ - "name.first":"first" - } - }, - { - term:{ - "name.last":"last" - } - } - ] - } -} \ No newline at end of file diff --git a/core/src/test/resources/org/elasticsearch/index/query/disMax2.json b/core/src/test/resources/org/elasticsearch/index/query/disMax2.json deleted file mode 100644 index ea92d6498f4..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/disMax2.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "dis_max":{ - "queries":[ - { - "prefix":{ - "name.first":{ - "value":"sh", - "boost":1.2 - } - } - } - ] - } -} \ No newline at end of file diff --git a/core/src/test/resources/org/elasticsearch/index/query/field3.json b/core/src/test/resources/org/elasticsearch/index/query/field3.json deleted file mode 100644 index 61e349f7b7f..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/field3.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - field:{ - age:{ - query:34, - boost:2.0, - enable_position_increments:false - } - } -} \ No newline at end of file diff --git a/core/src/test/resources/org/elasticsearch/index/query/function-filter-score-query.json b/core/src/test/resources/org/elasticsearch/index/query/function-filter-score-query.json deleted file mode 100644 index a7b4790fb75..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/function-filter-score-query.json +++ /dev/null @@ -1,24 +0,0 @@ - - -{ - "function_score":{ - "query":{ - "term":{ - "name.last":"banon" - } - }, - "functions": [ - { - "weight": 3, - "filter": { - "term":{ - "name.last":"banon" - } - } - } - ], - "boost" : 3, - "score_mode" : "avg", - "max_boost" : 10 - } -} \ No newline at end of 
file diff --git a/core/src/test/resources/org/elasticsearch/index/query/function-score-query-causing-NPE.json b/core/src/test/resources/org/elasticsearch/index/query/function-score-query-causing-NPE.json deleted file mode 100644 index 283682bd90f..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/function-score-query-causing-NPE.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "function_score": { - "script_score": { - "script": "_index['text']['foo'].tf()" - }, - "weight": 2 - } -} - diff --git a/core/src/test/resources/org/elasticsearch/index/query/fuzzy-with-fields.json b/core/src/test/resources/org/elasticsearch/index/query/fuzzy-with-fields.json deleted file mode 100644 index 7636496adc4..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/fuzzy-with-fields.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "fuzzy":{ - "name.first":{ - "value":"sh", - "fuzziness": "AUTO", - "prefix_length":1, - "boost":2.0 - } - } -} diff --git a/core/src/test/resources/org/elasticsearch/index/query/fuzzy-with-fields2.json b/core/src/test/resources/org/elasticsearch/index/query/fuzzy-with-fields2.json deleted file mode 100644 index 095ecc6341d..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/fuzzy-with-fields2.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "fuzzy":{ - "age":{ - "value":12, - "fuzziness":5, - "boost":2.0 - } - } -} diff --git a/core/src/test/resources/org/elasticsearch/index/query/fuzzy.json b/core/src/test/resources/org/elasticsearch/index/query/fuzzy.json deleted file mode 100644 index 27d8deebe5a..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/fuzzy.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "fuzzy":{ - "name.first":"sh" - } -} \ No newline at end of file diff --git a/core/src/test/resources/org/elasticsearch/index/query/geoShape-filter.json b/core/src/test/resources/org/elasticsearch/index/query/geoShape-filter.json deleted file mode 100644 index 192a41ef766..00000000000 --- 
a/core/src/test/resources/org/elasticsearch/index/query/geoShape-filter.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "geo_shape" : { - "country" : { - "shape" : { - "type" : "Envelope", - "coordinates" : [ - [-45, 45], - [45, -45] - ] - }, - "relation" : "intersects" - } - } -} diff --git a/core/src/test/resources/org/elasticsearch/index/query/geoShape-query.json b/core/src/test/resources/org/elasticsearch/index/query/geoShape-query.json deleted file mode 100644 index e0af8278a53..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/geoShape-query.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "geo_shape" : { - "country" : { - "shape" : { - "type" : "Envelope", - "coordinates" : [ - [-45, 45], - [45, -45] - ] - }, - "relation" : "intersects" - } - } -} \ No newline at end of file diff --git a/core/src/test/resources/org/elasticsearch/index/query/geo_boundingbox-named.json b/core/src/test/resources/org/elasticsearch/index/query/geo_boundingbox-named.json deleted file mode 100644 index 4cfdb5ea0ed..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/geo_boundingbox-named.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "geo_bounding_box":{ - "location":{ - "top_left":[-70, 40], - "bottom_right":[-80, 30] - }, - "_name":"test" - } -} diff --git a/core/src/test/resources/org/elasticsearch/index/query/geo_boundingbox1.json b/core/src/test/resources/org/elasticsearch/index/query/geo_boundingbox1.json deleted file mode 100644 index fab83dc0471..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/geo_boundingbox1.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "geo_bounding_box":{ - "location":{ - "top_left":[-70, 40], - "bottom_right":[-80, 30] - } - } -} diff --git a/core/src/test/resources/org/elasticsearch/index/query/geo_boundingbox2.json b/core/src/test/resources/org/elasticsearch/index/query/geo_boundingbox2.json deleted file mode 100644 index e0e4c9bd378..00000000000 --- 
a/core/src/test/resources/org/elasticsearch/index/query/geo_boundingbox2.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "geo_bounding_box":{ - "location":{ - "top_left":{ - "lat":40, - "lon":-70 - }, - "bottom_right":{ - "lat":30, - "lon":-80 - } - } - } -} diff --git a/core/src/test/resources/org/elasticsearch/index/query/geo_boundingbox3.json b/core/src/test/resources/org/elasticsearch/index/query/geo_boundingbox3.json deleted file mode 100644 index 49d25873df4..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/geo_boundingbox3.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "geo_bounding_box":{ - "location":{ - "top_left":"40, -70", - "bottom_right":"30, -80" - } - } -} diff --git a/core/src/test/resources/org/elasticsearch/index/query/geo_boundingbox4.json b/core/src/test/resources/org/elasticsearch/index/query/geo_boundingbox4.json deleted file mode 100644 index 19ec587e213..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/geo_boundingbox4.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "geo_bounding_box":{ - "location":{ - "top_left":"drn5x1g8cu2y", - "bottom_right":"30, -80" - } - } -} diff --git a/core/src/test/resources/org/elasticsearch/index/query/geo_boundingbox5.json b/core/src/test/resources/org/elasticsearch/index/query/geo_boundingbox5.json deleted file mode 100644 index 43678991302..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/geo_boundingbox5.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "geo_bounding_box":{ - "location":{ - "top_right":"40, -80", - "bottom_left":"30, -70" - } - } -} diff --git a/core/src/test/resources/org/elasticsearch/index/query/geo_boundingbox6.json b/core/src/test/resources/org/elasticsearch/index/query/geo_boundingbox6.json deleted file mode 100644 index ccadf927088..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/geo_boundingbox6.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "geo_bounding_box":{ - "location":{ - "right": -80, - "top": 40, - "left": -70, - 
"bottom": 30 - } - } -} diff --git a/core/src/test/resources/org/elasticsearch/index/query/geo_distance-named.json b/core/src/test/resources/org/elasticsearch/index/query/geo_distance-named.json deleted file mode 100644 index fd83153bd7d..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/geo_distance-named.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "geo_distance":{ - "distance":"12mi", - "location":{ - "lat":40, - "lon":-70 - }, - "_name":"test" - } -} diff --git a/core/src/test/resources/org/elasticsearch/index/query/geo_distance1.json b/core/src/test/resources/org/elasticsearch/index/query/geo_distance1.json deleted file mode 100644 index b970928bb0c..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/geo_distance1.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "geo_distance":{ - "distance":"12mi", - "location":{ - "lat":40, - "lon":-70 - } - } -} diff --git a/core/src/test/resources/org/elasticsearch/index/query/geo_distance10.json b/core/src/test/resources/org/elasticsearch/index/query/geo_distance10.json deleted file mode 100644 index 5ed970d44b0..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/geo_distance10.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "geo_distance":{ - "distance":19.312128, - "unit":"km", - "location":{ - "lat":40, - "lon":-70 - } - } -} diff --git a/core/src/test/resources/org/elasticsearch/index/query/geo_distance11.json b/core/src/test/resources/org/elasticsearch/index/query/geo_distance11.json deleted file mode 100644 index 45f0aabfb35..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/geo_distance11.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "geo_distance":{ - "distance":"19.312128km", - "location":{ - "lat":40, - "lon":-70 - } - } -} diff --git a/core/src/test/resources/org/elasticsearch/index/query/geo_distance12.json b/core/src/test/resources/org/elasticsearch/index/query/geo_distance12.json deleted file mode 100644 index 4a05cb2cb73..00000000000 --- 
a/core/src/test/resources/org/elasticsearch/index/query/geo_distance12.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "geo_distance":{ - "distance":"12mi", - "unit":"km", - "location":{ - "lat":40, - "lon":-70 - } - } -} diff --git a/core/src/test/resources/org/elasticsearch/index/query/geo_distance2.json b/core/src/test/resources/org/elasticsearch/index/query/geo_distance2.json deleted file mode 100644 index d8a07e996d4..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/geo_distance2.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "geo_distance":{ - "distance":"12mi", - "location":[-70, 40] - } -} diff --git a/core/src/test/resources/org/elasticsearch/index/query/geo_distance3.json b/core/src/test/resources/org/elasticsearch/index/query/geo_distance3.json deleted file mode 100644 index 57ce12bdb85..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/geo_distance3.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "geo_distance":{ - "distance":"12mi", - "location":"40, -70" - } -} diff --git a/core/src/test/resources/org/elasticsearch/index/query/geo_distance4.json b/core/src/test/resources/org/elasticsearch/index/query/geo_distance4.json deleted file mode 100644 index c9fb66160c6..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/geo_distance4.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "geo_distance":{ - "distance":"12mi", - "location":"drn5x1g8cu2y" - } -} diff --git a/core/src/test/resources/org/elasticsearch/index/query/geo_distance5.json b/core/src/test/resources/org/elasticsearch/index/query/geo_distance5.json deleted file mode 100644 index 233b3310f1e..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/geo_distance5.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "geo_distance":{ - "distance":12, - "unit":"mi", - "location":{ - "lat":40, - "lon":-70 - } - } -} diff --git a/core/src/test/resources/org/elasticsearch/index/query/geo_distance6.json 
b/core/src/test/resources/org/elasticsearch/index/query/geo_distance6.json deleted file mode 100644 index 28a977a348b..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/geo_distance6.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "geo_distance":{ - "distance":"12", - "unit":"mi", - "location":{ - "lat":40, - "lon":-70 - } - } -} diff --git a/core/src/test/resources/org/elasticsearch/index/query/geo_distance7.json b/core/src/test/resources/org/elasticsearch/index/query/geo_distance7.json deleted file mode 100644 index b783b7f5c97..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/geo_distance7.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "geo_distance":{ - "distance":"19.312128", - "location":{ - "lat":40, - "lon":-70 - } - } -} diff --git a/core/src/test/resources/org/elasticsearch/index/query/geo_distance8.json b/core/src/test/resources/org/elasticsearch/index/query/geo_distance8.json deleted file mode 100644 index aa0373849a4..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/geo_distance8.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "geo_distance":{ - "distance":19.312128, - "location":{ - "lat":40, - "lon":-70 - } - } -} diff --git a/core/src/test/resources/org/elasticsearch/index/query/geo_distance9.json b/core/src/test/resources/org/elasticsearch/index/query/geo_distance9.json deleted file mode 100644 index 326f493617c..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/geo_distance9.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "geo_distance":{ - "distance":"19.312128", - "unit":"km", - "location":{ - "lat":40, - "lon":-70 - } - } -} diff --git a/core/src/test/resources/org/elasticsearch/index/query/geo_polygon-named.json b/core/src/test/resources/org/elasticsearch/index/query/geo_polygon-named.json deleted file mode 100644 index 891a0ac3328..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/geo_polygon-named.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "geo_polygon":{ - 
"location":{ - "points":[ - [-70, 40], - [-80, 30], - [-90, 20] - ] - }, - "_name":"test" - } -} diff --git a/core/src/test/resources/org/elasticsearch/index/query/geo_polygon1.json b/core/src/test/resources/org/elasticsearch/index/query/geo_polygon1.json deleted file mode 100644 index 50f60bc5dca..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/geo_polygon1.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "geo_polygon":{ - "location":{ - "points":[ - [-70, 40], - [-80, 30], - [-90, 20] - ] - } - } -} diff --git a/core/src/test/resources/org/elasticsearch/index/query/geo_polygon2.json b/core/src/test/resources/org/elasticsearch/index/query/geo_polygon2.json deleted file mode 100644 index 2b543a3a37c..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/geo_polygon2.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "geo_polygon":{ - "location":{ - "points":[ - { - "lat":40, - "lon":-70 - }, - { - "lat":30, - "lon":-80 - }, - { - "lat":20, - "lon":-90 - } - ] - } - } -} diff --git a/core/src/test/resources/org/elasticsearch/index/query/geo_polygon3.json b/core/src/test/resources/org/elasticsearch/index/query/geo_polygon3.json deleted file mode 100644 index e2c58bdbb14..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/geo_polygon3.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "geo_polygon":{ - "location":{ - "points":[ - "40, -70", - "30, -80", - "20, -90" - ] - } - } -} diff --git a/core/src/test/resources/org/elasticsearch/index/query/geo_polygon4.json b/core/src/test/resources/org/elasticsearch/index/query/geo_polygon4.json deleted file mode 100644 index 9c32b07b7b0..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/geo_polygon4.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "geo_polygon":{ - "location":{ - "points":[ - "drn5x1g8cu2y", - "30, -80", - "20, -90" - ] - } - } -} diff --git a/core/src/test/resources/org/elasticsearch/index/query/has-child-in-and-filter-cached.json 
b/core/src/test/resources/org/elasticsearch/index/query/has-child-in-and-filter-cached.json deleted file mode 100644 index 4b055cb246e..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/has-child-in-and-filter-cached.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "filtered":{ - "filter":{ - "and" : { - "filters" : [ - { - "has_child" : { - "type" : "child", - "query" : { - "match_all" : {} - } - } - } - ], - "_cache" : true - } - } - } -} \ No newline at end of file diff --git a/core/src/test/resources/org/elasticsearch/index/query/has-child-with-inner-hits.json b/core/src/test/resources/org/elasticsearch/index/query/has-child-with-inner-hits.json new file mode 100644 index 00000000000..38d4483e9fc --- /dev/null +++ b/core/src/test/resources/org/elasticsearch/index/query/has-child-with-inner-hits.json @@ -0,0 +1,30 @@ +{ + "has_child" : { + "query" : { + "range" : { + "mapped_string" : { + "from" : "agJhRET", + "to" : "zvqIq", + "include_lower" : true, + "include_upper" : true, + "boost" : 1.0 + } + } + }, + "child_type" : "child", + "score_mode" : "avg", + "min_children" : 883170873, + "max_children" : 1217235442, + "boost" : 2.0, + "_name" : "WNzYMJKRwePuRBh", + "inner_hits" : { + "name" : "inner_hits_name", + "size" : 100, + "sort" : [ { + "mapped_string" : { + "order" : "asc" + } + } ] + } + } +} diff --git a/core/src/test/resources/org/elasticsearch/index/query/has-child.json b/core/src/test/resources/org/elasticsearch/index/query/has-child.json deleted file mode 100644 index c87ac173624..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/has-child.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "filtered":{ - "filter":{ - "has_child" : { - "type" : "child", - "query" : { - "match_all" : {} - }, - "_cache" : true - } - } - } -} \ No newline at end of file diff --git a/core/src/test/resources/org/elasticsearch/index/query/mapping.json b/core/src/test/resources/org/elasticsearch/index/query/mapping.json deleted file mode 100644 
index 39392492078..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/mapping.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "person":{ - "properties":{ - "location":{ - "type":"geo_point" - }, - "country" : { - "type" : "geo_shape" - }, - "born":{ - "type":"date" - } - } - } -} \ No newline at end of file diff --git a/core/src/test/resources/org/elasticsearch/index/query/match-query-bad-type.json b/core/src/test/resources/org/elasticsearch/index/query/match-query-bad-type.json deleted file mode 100644 index 47d122715ac..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/match-query-bad-type.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "match" : { - "message" : { - "query" : "this is a test", - "type" : "doesNotExist" - } - } -} \ No newline at end of file diff --git a/core/src/test/resources/org/elasticsearch/index/query/match-with-fuzzy-transpositions.json b/core/src/test/resources/org/elasticsearch/index/query/match-with-fuzzy-transpositions.json deleted file mode 100644 index 5f4fe8bcaca..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/match-with-fuzzy-transpositions.json +++ /dev/null @@ -1 +0,0 @@ -{ "match": { "body": { "query": "fuzzy", "fuzziness": 1, "fuzzy_transpositions": true }} } diff --git a/core/src/test/resources/org/elasticsearch/index/query/match-without-fuzzy-transpositions.json b/core/src/test/resources/org/elasticsearch/index/query/match-without-fuzzy-transpositions.json deleted file mode 100644 index 06c77aafb33..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/match-without-fuzzy-transpositions.json +++ /dev/null @@ -1 +0,0 @@ -{ "match": { "body": { "query": "fuzzy", "fuzziness": 1, "fuzzy_transpositions": false }} } diff --git a/core/src/test/resources/org/elasticsearch/index/query/matchAll.json b/core/src/test/resources/org/elasticsearch/index/query/matchAll.json deleted file mode 100644 index 33256469508..00000000000 --- 
a/core/src/test/resources/org/elasticsearch/index/query/matchAll.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - match_all:{ - boost:1.2 - } -} \ No newline at end of file diff --git a/core/src/test/resources/org/elasticsearch/index/query/match_all_empty1.json b/core/src/test/resources/org/elasticsearch/index/query/match_all_empty1.json deleted file mode 100644 index 6dd141fe862..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/match_all_empty1.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "match_all": {} -} \ No newline at end of file diff --git a/core/src/test/resources/org/elasticsearch/index/query/match_all_empty2.json b/core/src/test/resources/org/elasticsearch/index/query/match_all_empty2.json deleted file mode 100644 index a0549df7130..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/match_all_empty2.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "match_all": [] -} \ No newline at end of file diff --git a/core/src/test/resources/org/elasticsearch/index/query/mlt-items.json b/core/src/test/resources/org/elasticsearch/index/query/mlt-items.json deleted file mode 100644 index d7839ac707b..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/mlt-items.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "more_like_this" : { - "fields" : ["name.first", "name.last"], - "like_text": "Apache Lucene", - "like" : [ - { - "_index" : "test", - "_type" : "person", - "_id" : "1" - }, - { - "_index" : "test", - "_type" : "person", - "_id" : "2" - } - ], - "ids" : ["3", "4"], - "include" : true, - "min_term_freq" : 1, - "max_query_terms" : 12 - } -} diff --git a/core/src/test/resources/org/elasticsearch/index/query/mlt.json b/core/src/test/resources/org/elasticsearch/index/query/mlt.json deleted file mode 100644 index d3d98bee5aa..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/mlt.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "more_like_this" : { - "fields" : ["name.first", "name.last"], - "like_text" : "something", - 
"min_term_freq" : 1, - "max_query_terms" : 12 - } -} \ No newline at end of file diff --git a/core/src/test/resources/org/elasticsearch/index/query/multiMatch-query-bad-type.json b/core/src/test/resources/org/elasticsearch/index/query/multiMatch-query-bad-type.json deleted file mode 100644 index 9c3b751082d..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/multiMatch-query-bad-type.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "multi_match": { - "query": "foo bar", - "fields": [ "myField", "otherField" ], - "type":"doesNotExist" - } -} \ No newline at end of file diff --git a/core/src/test/resources/org/elasticsearch/index/query/multiMatch-query-fields-as-string.json b/core/src/test/resources/org/elasticsearch/index/query/multiMatch-query-fields-as-string.json deleted file mode 100644 index d29211d69db..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/multiMatch-query-fields-as-string.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "multi_match": { - "query": "foo bar", - "fields": "myField" - } -} \ No newline at end of file diff --git a/core/src/test/resources/org/elasticsearch/index/query/multiMatch-query-simple.json b/core/src/test/resources/org/elasticsearch/index/query/multiMatch-query-simple.json deleted file mode 100644 index 904ba0e6ccb..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/multiMatch-query-simple.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "multi_match": { - "query": "foo bar", - "fields": [ "myField", "otherField" ] - } -} \ No newline at end of file diff --git a/core/src/test/resources/org/elasticsearch/index/query/not-filter.json b/core/src/test/resources/org/elasticsearch/index/query/not-filter.json deleted file mode 100644 index 46ba5cd485e..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/not-filter.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "not":{ - "query":{ - "term":{ - "name.first":"shay1" - } - } - } -} diff --git 
a/core/src/test/resources/org/elasticsearch/index/query/not-filter2.json b/core/src/test/resources/org/elasticsearch/index/query/not-filter2.json deleted file mode 100644 index bd576d5dbce..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/not-filter2.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "not":{ - "term":{ - "name.first":"shay1" - } - } -} diff --git a/core/src/test/resources/org/elasticsearch/index/query/not-filter3.json b/core/src/test/resources/org/elasticsearch/index/query/not-filter3.json deleted file mode 100644 index bd576d5dbce..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/not-filter3.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "not":{ - "term":{ - "name.first":"shay1" - } - } -} diff --git a/core/src/test/resources/org/elasticsearch/index/query/prefix-boost.json b/core/src/test/resources/org/elasticsearch/index/query/prefix-boost.json deleted file mode 100644 index 4da623ac49f..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/prefix-boost.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "prefix":{ - "name.first":{ - "value":"sh", - "boost":1.2 - } - } -} \ No newline at end of file diff --git a/core/src/test/resources/org/elasticsearch/index/query/prefix-with-boost.json b/core/src/test/resources/org/elasticsearch/index/query/prefix-with-boost.json deleted file mode 100644 index 83e56cb4d5e..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/prefix-with-boost.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - prefix:{ - "name.first":{ - prefix:"sh", - boost:2.0 - } - } -} \ No newline at end of file diff --git a/core/src/test/resources/org/elasticsearch/index/query/prefix.json b/core/src/test/resources/org/elasticsearch/index/query/prefix.json deleted file mode 100644 index 49f5261d1d9..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/prefix.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - prefix:{ - "name.first":"sh" - } -} \ No newline at end of file diff --git 
a/core/src/test/resources/org/elasticsearch/index/query/query-fields-match.json b/core/src/test/resources/org/elasticsearch/index/query/query-fields-match.json deleted file mode 100644 index c15cdf3c6d1..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/query-fields-match.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - query_string:{ - fields:["name.*"], - use_dis_max:false, - query:"test" - } -} \ No newline at end of file diff --git a/core/src/test/resources/org/elasticsearch/index/query/query-fields1.json b/core/src/test/resources/org/elasticsearch/index/query/query-fields1.json deleted file mode 100644 index 84abcaafc05..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/query-fields1.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - query_string:{ - fields:["content", "name"], - use_dis_max:false, - query:"test" - } -} \ No newline at end of file diff --git a/core/src/test/resources/org/elasticsearch/index/query/query-fields2.json b/core/src/test/resources/org/elasticsearch/index/query/query-fields2.json deleted file mode 100644 index ab39c8773dd..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/query-fields2.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - query_string:{ - fields:["content", "name"], - use_dis_max:true, - query:"test" - } -} \ No newline at end of file diff --git a/core/src/test/resources/org/elasticsearch/index/query/query-fields3.json b/core/src/test/resources/org/elasticsearch/index/query/query-fields3.json deleted file mode 100644 index 8114c1b3b86..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/query-fields3.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - query_string:{ - fields:["content^2.2", "name"], - use_dis_max:true, - query:"test" - } -} \ No newline at end of file diff --git a/core/src/test/resources/org/elasticsearch/index/query/query-filter.json b/core/src/test/resources/org/elasticsearch/index/query/query-filter.json deleted file mode 100644 index a154688cdcc..00000000000 
--- a/core/src/test/resources/org/elasticsearch/index/query/query-filter.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - term:{ - "name.first":"shay" - } -} diff --git a/core/src/test/resources/org/elasticsearch/index/query/query-regexp-max-determinized-states.json b/core/src/test/resources/org/elasticsearch/index/query/query-regexp-max-determinized-states.json deleted file mode 100644 index 023b90ec6fa..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/query-regexp-max-determinized-states.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - query_string: { - default_field: "content", - query:"/foo*bar/", - max_determinized_states: 5000 - } -} diff --git a/core/src/test/resources/org/elasticsearch/index/query/query-regexp-too-many-determinized-states.json b/core/src/test/resources/org/elasticsearch/index/query/query-regexp-too-many-determinized-states.json deleted file mode 100644 index 0d2d41a7e39..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/query-regexp-too-many-determinized-states.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - query_string: { - default_field: "content", - query: "/[ac]*a[ac]{50,200}/" - } -} diff --git a/core/src/test/resources/org/elasticsearch/index/query/query-timezone-incorrect.json b/core/src/test/resources/org/elasticsearch/index/query/query-timezone-incorrect.json deleted file mode 100644 index 3bffb0f4a40..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/query-timezone-incorrect.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "query_string":{ - "time_zone":"This timezone does not exist", - "query":"date:[2012 TO 2014]" - } -} diff --git a/core/src/test/resources/org/elasticsearch/index/query/query-timezone.json b/core/src/test/resources/org/elasticsearch/index/query/query-timezone.json deleted file mode 100644 index e2fcc0e2266..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/query-timezone.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "query_string":{ - "time_zone":"Europe/Paris", - 
"query":"date:[2012 TO 2014]" - } -} diff --git a/core/src/test/resources/org/elasticsearch/index/query/query.json b/core/src/test/resources/org/elasticsearch/index/query/query.json deleted file mode 100644 index f07a0d8a59a..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/query.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - query_string:{ - default_field:"content", - phrase_slop:1, - query:"test" - } -} \ No newline at end of file diff --git a/core/src/test/resources/org/elasticsearch/index/query/query2.json b/core/src/test/resources/org/elasticsearch/index/query/query2.json deleted file mode 100644 index 410e05cd808..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/query2.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - query_string:{ - default_field:"age", - query:"12~0.2" - } -} \ No newline at end of file diff --git a/core/src/test/resources/org/elasticsearch/index/query/range.json b/core/src/test/resources/org/elasticsearch/index/query/range.json deleted file mode 100644 index cc2363fc22b..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/range.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - range:{ - age:{ - from:"23", - to:"54", - include_lower:true, - include_upper:false - } - } -} \ No newline at end of file diff --git a/core/src/test/resources/org/elasticsearch/index/query/range2.json b/core/src/test/resources/org/elasticsearch/index/query/range2.json deleted file mode 100644 index c116b3c0a33..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/range2.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - range:{ - age:{ - gte:"23", - lt:"54" - } - } -} \ No newline at end of file diff --git a/core/src/test/resources/org/elasticsearch/index/query/regexp-boost.json b/core/src/test/resources/org/elasticsearch/index/query/regexp-boost.json deleted file mode 100644 index ed8699b39c5..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/regexp-boost.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - 
"regexp":{ - "name.first":{ - "value":"sh", - "boost":1.2 - } - } -} \ No newline at end of file diff --git a/core/src/test/resources/org/elasticsearch/index/query/regexp-max-determinized-states.json b/core/src/test/resources/org/elasticsearch/index/query/regexp-max-determinized-states.json deleted file mode 100644 index df2f5cc6030..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/regexp-max-determinized-states.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "regexp": { - "name.first": { - "value": "s.*y", - "max_determinized_states": 5000 - } - } -} diff --git a/core/src/test/resources/org/elasticsearch/index/query/regexp.json b/core/src/test/resources/org/elasticsearch/index/query/regexp.json deleted file mode 100644 index 6c3d69469cb..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/regexp.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "regexp":{ - "name.first": "s.*y" - } -} \ No newline at end of file diff --git a/core/src/test/resources/org/elasticsearch/index/query/simple-query-string.json b/core/src/test/resources/org/elasticsearch/index/query/simple-query-string.json deleted file mode 100644 index 9208e8876fe..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/simple-query-string.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "simple_query_string": { - "query": "foo bar", - "analyzer": "keyword", - "fields": ["body^5","_all"], - "default_operator": "and" - } -} diff --git a/core/src/test/resources/org/elasticsearch/index/query/span-multi-term-fuzzy-range.json b/core/src/test/resources/org/elasticsearch/index/query/span-multi-term-fuzzy-range.json deleted file mode 100644 index d9ca05b3f3e..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/span-multi-term-fuzzy-range.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "span_multi":{ - "match":{ - "fuzzy":{ - "age":{ - "value":12, - "fuzziness":5, - "boost":2.0 - } - } - } - } -} diff --git 
a/core/src/test/resources/org/elasticsearch/index/query/span-multi-term-fuzzy-term.json b/core/src/test/resources/org/elasticsearch/index/query/span-multi-term-fuzzy-term.json deleted file mode 100644 index edb58e35ec1..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/span-multi-term-fuzzy-term.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "span_multi":{ - "match":{ - "fuzzy" : { - "user" : { - "value" : "ki", - "boost" : 1.08 - } - } - } - } -} diff --git a/core/src/test/resources/org/elasticsearch/index/query/span-multi-term-prefix.json b/core/src/test/resources/org/elasticsearch/index/query/span-multi-term-prefix.json deleted file mode 100644 index 62918d6ad71..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/span-multi-term-prefix.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "span_multi":{ - "match":{ - "prefix" : { "user" : { "value" : "ki", "boost" : 1.08 } } - } - } -} diff --git a/core/src/test/resources/org/elasticsearch/index/query/span-multi-term-range-numeric.json b/core/src/test/resources/org/elasticsearch/index/query/span-multi-term-range-numeric.json deleted file mode 100644 index d9db8a445cc..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/span-multi-term-range-numeric.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "span_multi":{ - "match":{ - "range" : { - "age" : { - "from" : 10, - "to" : 20, - "include_lower" : true, - "include_upper": false, - "boost" : 2.0 - } - } - } - } -} - diff --git a/core/src/test/resources/org/elasticsearch/index/query/span-multi-term-range-term.json b/core/src/test/resources/org/elasticsearch/index/query/span-multi-term-range-term.json deleted file mode 100644 index 8c4da31ebfc..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/span-multi-term-range-term.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "span_multi":{ - "match":{ - "range" : { - "user" : { - "from" : "alice", - "to" : "bob", - "include_lower" : true, - "include_upper": false, - "boost" : 
2.0 - } - } - } - } -} - diff --git a/core/src/test/resources/org/elasticsearch/index/query/span-multi-term-wildcard.json b/core/src/test/resources/org/elasticsearch/index/query/span-multi-term-wildcard.json deleted file mode 100644 index a2eaeb72095..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/span-multi-term-wildcard.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "span_multi":{ - "match":{ - "wildcard" : { "user" : {"value": "ki*y" , "boost" : 1.08}} - } - } -} diff --git a/core/src/test/resources/org/elasticsearch/index/query/spanContaining.json b/core/src/test/resources/org/elasticsearch/index/query/spanContaining.json deleted file mode 100644 index 13f91d88b44..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/spanContaining.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - span_containing:{ - big:{ - span_term:{ - age:34 - } - }, - little:{ - span_term:{ - age:35 - } - } - } -} \ No newline at end of file diff --git a/core/src/test/resources/org/elasticsearch/index/query/spanFieldMaskingTerm.json b/core/src/test/resources/org/elasticsearch/index/query/spanFieldMaskingTerm.json deleted file mode 100644 index 9849c105e9a..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/spanFieldMaskingTerm.json +++ /dev/null @@ -1,29 +0,0 @@ -{ - span_near:{ - clauses:[ - { - span_term:{ - age:34 - } - }, - { - span_term:{ - age:35 - } - }, - { - field_masking_span:{ - query:{ - span_term:{ - age_1 : 36 - } - }, - field:"age" - } - } - ], - slop:12, - in_order:false, - collect_payloads:false - } -} \ No newline at end of file diff --git a/core/src/test/resources/org/elasticsearch/index/query/spanFirst.json b/core/src/test/resources/org/elasticsearch/index/query/spanFirst.json deleted file mode 100644 index 9972c769137..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/spanFirst.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - span_first:{ - match:{ - span_term:{ - age:34 - } - }, - end:12 - } -} \ No 
newline at end of file diff --git a/core/src/test/resources/org/elasticsearch/index/query/spanNear.json b/core/src/test/resources/org/elasticsearch/index/query/spanNear.json deleted file mode 100644 index ce17063978a..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/spanNear.json +++ /dev/null @@ -1,24 +0,0 @@ -{ - span_near:{ - clauses:[ - { - span_term:{ - age:34 - } - }, - { - span_term:{ - age:35 - } - }, - { - span_term:{ - age:36 - } - } - ], - slop:12, - in_order:false, - collect_payloads:false - } -} \ No newline at end of file diff --git a/core/src/test/resources/org/elasticsearch/index/query/spanNot.json b/core/src/test/resources/org/elasticsearch/index/query/spanNot.json deleted file mode 100644 index c90de330df7..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/spanNot.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - span_not:{ - include:{ - span_term:{ - age:34 - } - }, - exclude:{ - span_term:{ - age:35 - } - } - } -} \ No newline at end of file diff --git a/core/src/test/resources/org/elasticsearch/index/query/spanOr.json b/core/src/test/resources/org/elasticsearch/index/query/spanOr.json deleted file mode 100644 index 06c52628e5f..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/spanOr.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - span_or:{ - clauses:[ - { - span_term:{ - age:34 - } - }, - { - span_term:{ - age:35 - } - }, - { - span_term:{ - age:36 - } - } - ] - } -} \ No newline at end of file diff --git a/core/src/test/resources/org/elasticsearch/index/query/spanOr2.json b/core/src/test/resources/org/elasticsearch/index/query/spanOr2.json deleted file mode 100644 index b64ce1cae3f..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/spanOr2.json +++ /dev/null @@ -1,30 +0,0 @@ -{ - "span_or":{ - "clauses":[ - { - "span_term":{ - "age":{ - "value":34, - "boost":1.0 - } - } - }, - { - "span_term":{ - "age":{ - "value":35, - "boost":1.0 - } - } - }, - { - "span_term":{ - 
"age":{ - "value":36, - "boost":1.0 - } - } - } - ] - } -} \ No newline at end of file diff --git a/core/src/test/resources/org/elasticsearch/index/query/spanTerm.json b/core/src/test/resources/org/elasticsearch/index/query/spanTerm.json deleted file mode 100644 index 0186593ff58..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/spanTerm.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - span_term:{ - age:34 - } -} \ No newline at end of file diff --git a/core/src/test/resources/org/elasticsearch/index/query/spanWithin.json b/core/src/test/resources/org/elasticsearch/index/query/spanWithin.json deleted file mode 100644 index 7cf767cdf12..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/spanWithin.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - span_within:{ - big:{ - span_term:{ - age:34 - } - }, - little:{ - span_term:{ - age:35 - } - } - } -} \ No newline at end of file diff --git a/core/src/test/resources/org/elasticsearch/index/query/starColonStar.json b/core/src/test/resources/org/elasticsearch/index/query/starColonStar.json deleted file mode 100644 index c769ca09380..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/starColonStar.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "query_string": { - "query": "*:*" - } -} \ No newline at end of file diff --git a/core/src/test/resources/org/elasticsearch/index/query/term-array-invalid.json b/core/src/test/resources/org/elasticsearch/index/query/term-array-invalid.json deleted file mode 100644 index a198bc2dafc..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/term-array-invalid.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "term": { - "age": [34, 35] - } -} diff --git a/core/src/test/resources/org/elasticsearch/index/query/term-with-boost.json b/core/src/test/resources/org/elasticsearch/index/query/term-with-boost.json deleted file mode 100644 index 5f33cd55ea3..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/term-with-boost.json 
+++ /dev/null @@ -1,8 +0,0 @@ -{ - term:{ - age:{ - value:34, - boost:2.0 - } - } -} \ No newline at end of file diff --git a/core/src/test/resources/org/elasticsearch/index/query/term.json b/core/src/test/resources/org/elasticsearch/index/query/term.json deleted file mode 100644 index 378cf42f04c..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/term.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - term:{ - age:34 - } -} \ No newline at end of file diff --git a/core/src/test/resources/org/elasticsearch/index/query/terms-filter-named.json b/core/src/test/resources/org/elasticsearch/index/query/terms-filter-named.json deleted file mode 100644 index 4a749dbf954..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/terms-filter-named.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "constant_score": { - "filter": { - "terms":{ - "name.last":["banon", "kimchy"], - "_name":"test" - } - } - } -} diff --git a/core/src/test/resources/org/elasticsearch/index/query/terms-filter.json b/core/src/test/resources/org/elasticsearch/index/query/terms-filter.json deleted file mode 100644 index 4a694bcb260..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/terms-filter.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "constant_score": { - "filter": { - "terms":{ - "name.last":["banon", "kimchy"] - } - } - } -} diff --git a/core/src/test/resources/org/elasticsearch/index/query/terms-query-options.json b/core/src/test/resources/org/elasticsearch/index/query/terms-query-options.json deleted file mode 100644 index 48263a5bc6e..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/terms-query-options.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "terms":{ - "name.first":["shay", "test", "elasticsearch"], - "disable_coord":true, - "boost":2.0, - "min_should_match":2 - } -} \ No newline at end of file diff --git a/core/src/test/resources/org/elasticsearch/index/query/terms-query.json 
b/core/src/test/resources/org/elasticsearch/index/query/terms-query.json deleted file mode 100644 index a3e0d084ad2..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/terms-query.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "terms":{ - "name.first":["shay", "test"] - } -} \ No newline at end of file diff --git a/core/src/test/resources/org/elasticsearch/index/query/wildcard-boost.json b/core/src/test/resources/org/elasticsearch/index/query/wildcard-boost.json deleted file mode 100644 index 53c8d82a8d4..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/wildcard-boost.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "wildcard":{ - "name.first":{ - "value":"sh*", - "boost":1.2 - } - } -} \ No newline at end of file diff --git a/core/src/test/resources/org/elasticsearch/index/query/wildcard.json b/core/src/test/resources/org/elasticsearch/index/query/wildcard.json deleted file mode 100644 index c8ed85262bd..00000000000 --- a/core/src/test/resources/org/elasticsearch/index/query/wildcard.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - wildcard:{ - "name.first":"sh*" - } -} \ No newline at end of file diff --git a/docs/reference/indices/put-mapping.asciidoc b/docs/reference/indices/put-mapping.asciidoc index 25b6ce33917..7d6e6587786 100644 --- a/docs/reference/indices/put-mapping.asciidoc +++ b/docs/reference/indices/put-mapping.asciidoc @@ -189,7 +189,7 @@ PUT my_index/_mapping/type_one?update_all_types <3> ----------------------------------- // AUTOSENSE <1> Create an index with two types, both of which contain a `text` field which have the same mapping. -<2> Tring to update the `search_analyzer` just for `type_one` throws an exception like `"Merge failed with failures..."`. +<2> Trying to update the `search_analyzer` just for `type_one` throws an exception like `"Merge failed with failures..."`. <3> Adding the `update_all_types` parameter updates the `text` field in `type_one` and `type_two`. 
diff --git a/docs/reference/migration/migrate_3_0.asciidoc b/docs/reference/migration/migrate_3_0.asciidoc index 5654d8d04ff..6607a98f0fa 100644 --- a/docs/reference/migration/migrate_3_0.asciidoc +++ b/docs/reference/migration/migrate_3_0.asciidoc @@ -97,3 +97,168 @@ Previously, there were three settings for the ping timeout: `discovery.zen.initi `discovery.zen.ping.timeout` and `discovery.zen.ping_timeout`. The former two have been removed and the only setting key for the ping timeout is now `discovery.zen.ping_timeout`. The default value for ping timeouts remains at three seconds. + +=== Plugins + +Plugins implementing custom queries need to implement the `fromXContent(QueryParseContext)` method in their +`QueryParser` subclass rather than `parse`. This method will take care of parsing the query from `XContent` format +into an intermediate query representation that can be streamed between the nodes in binary format, effectively the +query object used in the java api. Also, the query parser needs to implement the `getBuilderPrototype` method that +returns a prototype of the `NamedWriteable` query, which allows to deserialize an incoming query by calling +`readFrom(StreamInput)` against it, which will create a new object, see usages of `Writeable`. The `QueryParser` +also needs to declare the generic type of the query that it supports and it's able to parse. +The query object can then transform itself into a lucene query through the new `toQuery(QueryShardContext)` method, +which returns a lucene query to be executed on the data node. + +Similarly, plugins implementing custom score functions need to implement the `fromXContent(QueryParseContext)` +method in their `ScoreFunctionParser` subclass rather than `parse`. This method will take care of parsing +the function from `XContent` format into an intermediate function representation that can be streamed between +the nodes in binary format, effectively the function object used in the java api. 
Also, the query parser needs +to implement the `getBuilderPrototype` method that returns a prototype of the `NamedWriteable` function, which +allows to deserialize an incoming function by calling `readFrom(StreamInput)` against it, which will create a +new object, see usages of `Writeable`. The `ScoreFunctionParser` also needs to declare the generic type of the +function that it supports and it's able to parse. The function object can then transform itself into a lucene +function through the new `toFunction(QueryShardContext)` method, which returns a lucene function to be executed +on the data node. + +=== Java-API + +==== BoostingQueryBuilder + +Removed setters for mandatory positive/negative query. Both arguments now have +to be supplied at construction time already and have to be non-null. + +==== SpanContainingQueryBuilder + +Removed setters for mandatory big/little inner span queries. Both arguments now have +to be supplied at construction time already and have to be non-null. Updated +static factory methods in QueryBuilders accordingly. + +==== SpanOrQueryBuilder + +Making sure that query contains at least one clause by making initial clause mandatory +in constructor. + +==== SpanNearQueryBuilder + +Removed setter for mandatory slop parameter, needs to be set in constructor now. Also +making sure that query contains at least one clause by making initial clause mandatory +in constructor. Updated the static factory methods in QueryBuilders accordingly. + +==== SpanNotQueryBuilder + +Removed setter for mandatory include/exclude span query clause, needs to be set in constructor now. +Updated the static factory methods in QueryBuilders and tests accordingly. + +==== SpanWithinQueryBuilder + +Removed setters for mandatory big/little inner span queries. Both arguments now have +to be supplied at construction time already and have to be non-null. Updated +static factory methods in QueryBuilders accordingly. 
+ +==== QueryFilterBuilder + +Removed the setter `queryName(String queryName)` since this field is not supported +in this type of query. Use `FQueryFilterBuilder.queryName(String queryName)` instead +when in need to wrap a named query as a filter. + +==== WrapperQueryBuilder + +Removed `wrapperQueryBuilder(byte[] source, int offset, int length)`. Instead simply +use `wrapperQueryBuilder(byte[] source)`. Updated the static factory methods in +QueryBuilders accordingly. + +==== QueryStringQueryBuilder + +Removed ability to pass in boost value using `field(String field)` method in form e.g. `field^2`. +Use the `field(String, float)` method instead. + +==== Operator + +Removed the enums called `Operator` from `MatchQueryBuilder`, `QueryStringQueryBuilder`, +`SimpleQueryStringBuilder`, and `CommonTermsQueryBuilder` in favour of using the enum +defined in `org.elasticsearch.index.query.Operator` in an effort to consolidate the +codebase and avoid duplication. + +==== queryName and boost support + +Support for `queryName` and `boost` has been streamlined to all of the queries. That is +a breaking change till queries get sent over the network as serialized json rather +than in `Streamable` format. In fact whenever additional fields are added to the json +representation of the query, older nodes might throw error when they find unknown fields. + +==== InnerHitsBuilder + +InnerHitsBuilder now has a dedicated addParentChildInnerHits and addNestedInnerHits methods +to differentiate between inner hits for nested vs. parent / child documents. This change +makes the type / path parameter mandatory. + +==== MatchQueryBuilder + +Moving MatchQueryBuilder.Type and MatchQueryBuilder.ZeroTermsQuery enum to MatchQuery.Type. +Also reusing new Operator enum. + +==== MoreLikeThisQueryBuilder + +Removed `MoreLikeThisQueryBuilder.Item#id(String id)`, `Item#doc(BytesReference doc)`, +`Item#doc(XContentBuilder doc)`. Use provided constructors instead. 
+ +Removed `MoreLikeThisQueryBuilder#addLike` in favor of texts and/or items being provided +at construction time. Using arrays there instead of lists now. + +Removed `MoreLikeThisQueryBuilder#addUnlike` in favor of using the `unlike` methods +which take arrays as arguments now rather than the lists used before. + +The deprecated `docs(Item... docs)`, `ignoreLike(Item... docs)`, +`ignoreLike(String... likeText)`, `addItem(Item... likeItems)` have been removed. + +==== GeoDistanceQueryBuilder + +Removing individual setters for lon() and lat() values, both values should be set together + using point(lon, lat). + +==== GeoDistanceRangeQueryBuilder + +Removing setters for to(Object ...) and from(Object ...) in favour of the only two allowed input +arguments (String, Number). Removing setter for center point (point(), geohash()) because parameter +is mandatory and should already be set in constructor. +Also removing setters for lt(), lte(), gt(), gte() since they can all be replaced by equivalent +calls to to/from() and includeLower()/includeUpper(). + +==== GeoPolygonQueryBuilder + +Require shell of polygon already to be specified in constructor instead of adding it pointwise. +This enables validation, but makes it necessary to remove the addPoint() methods. + +==== MultiMatchQueryBuilder + +Moving MultiMatchQueryBuilder.ZeroTermsQuery enum to MatchQuery.ZeroTermsQuery. +Also reusing new Operator enum. + +Removed ability to pass in boost value using `field(String field)` method in form e.g. `field^2`. +Use the `field(String, float)` method instead. + +==== MissingQueryBuilder + +The two individual setters for existence() and nullValue() were removed in favour of +optional constructor settings in order to better capture and validate their interdependent +settings at construction time. + +==== TermsQueryBuilder + +Remove the setter for `termsLookup()`, making it only possible to either use a TermsLookup object or +individual values at construction time.
Also moving individual settings for the TermsLookup (lookupIndex, +lookupType, lookupId, lookupPath) to the separate TermsLookup class, using constructor only and moving +checks for validation there. Removed `TermsLookupQueryBuilder` in favour of `TermsQueryBuilder`. + +==== FunctionScoreQueryBuilder + +`add` methods have been removed, all filters and functions must be provided as constructor arguments by +creating an array of `FunctionScoreQueryBuilder.FilterFunctionBuilder` objects, containing one element +for each filter/function pair. + +`scoreMode` and `boostMode` can only be provided using corresponding enum members instead +of string values: see `FilterFunctionScoreQuery.ScoreMode` and `CombineFunction`. + +`CombineFunction.MULT` has been renamed to `MULTIPLY`. + diff --git a/docs/reference/query-dsl/has-parent-query.asciidoc b/docs/reference/query-dsl/has-parent-query.asciidoc index 5f12e441640..19958bf149b 100644 --- a/docs/reference/query-dsl/has-parent-query.asciidoc +++ b/docs/reference/query-dsl/has-parent-query.asciidoc @@ -24,11 +24,10 @@ in the same manner as the `has_child` query. [float] ==== Scoring capabilities -The `has_parent` also has scoring support. The -supported score types are `score` or `none`. The default is `none` and -this ignores the score from the parent document. The score is in this +The `has_parent` also has scoring support. The default is `false` which +ignores the score from the parent document. The score is in this case equal to the boost on the `has_parent` query (Defaults to 1). If -the score type is set to `score`, then the score of the matching parent +the score is set to `true`, then the score of the matching parent document is aggregated into the child documents belonging to the matching parent document. The score mode can be specified with the `score_mode` field inside the `has_parent` query: @@ -38,7 +37,7 @@ matching parent document. 
The score mode can be specified with the { "has_parent" : { "parent_type" : "blog", - "score_mode" : "score", + "score" : true, "query" : { "term" : { "tag" : "something" diff --git a/docs/reference/query-dsl/range-query.asciidoc b/docs/reference/query-dsl/range-query.asciidoc index 3c216ca58f7..57fcf75e4f3 100644 --- a/docs/reference/query-dsl/range-query.asciidoc +++ b/docs/reference/query-dsl/range-query.asciidoc @@ -109,12 +109,11 @@ accepts it), or it can be specified as the `time_zone` parameter: "range" : { "timestamp" : { "gte": "2015-01-01 00:00:00", <1> - "lte": "now", + "lte": "now", <2> "time_zone": "+01:00" } } } -------------------------------------------------- <1> This date will be converted to `2014-12-31T23:00:00 UTC`. - - +<2> `now` is not affected by the `time_zone` parameter (dates must be stored as UTC). diff --git a/plugins/delete-by-query/src/main/java/org/elasticsearch/action/deletebyquery/DeleteByQueryRequestBuilder.java b/plugins/delete-by-query/src/main/java/org/elasticsearch/action/deletebyquery/DeleteByQueryRequestBuilder.java index 651dbbffe0c..d30cfaa67f5 100644 --- a/plugins/delete-by-query/src/main/java/org/elasticsearch/action/deletebyquery/DeleteByQueryRequestBuilder.java +++ b/plugins/delete-by-query/src/main/java/org/elasticsearch/action/deletebyquery/DeleteByQueryRequestBuilder.java @@ -63,7 +63,7 @@ public class DeleteByQueryRequestBuilder extends ActionRequestBuilder queryBuilder) { sourceBuilder().setQuery(queryBuilder); return this; } diff --git a/plugins/lang-expression/src/test/java/org/elasticsearch/script/expression/MoreExpressionTests.java b/plugins/lang-expression/src/test/java/org/elasticsearch/script/expression/MoreExpressionTests.java index ceb67d471bf..5966c1d8835 100644 --- a/plugins/lang-expression/src/test/java/org/elasticsearch/script/expression/MoreExpressionTests.java +++ b/plugins/lang-expression/src/test/java/org/elasticsearch/script/expression/MoreExpressionTests.java @@ -27,6 +27,7 @@ import 
org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchType; import org.elasticsearch.action.update.UpdateRequestBuilder; +import org.elasticsearch.common.lucene.search.function.CombineFunction; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.query.QueryBuilders; @@ -115,7 +116,7 @@ public class MoreExpressionTests extends ESIntegTestCase { client().prepareIndex("test", "doc", "3").setSource("text", "hello hello goodebye")); ScoreFunctionBuilder score = ScoreFunctionBuilders.scriptFunction(new Script("1 / _score", ScriptType.INLINE, "expression", null)); SearchRequestBuilder req = client().prepareSearch().setIndices("test"); - req.setQuery(QueryBuilders.functionScoreQuery(QueryBuilders.termQuery("text", "hello"), score).boostMode("replace")); + req.setQuery(QueryBuilders.functionScoreQuery(QueryBuilders.termQuery("text", "hello"), score).boostMode(CombineFunction.REPLACE)); req.setSearchType(SearchType.DFS_QUERY_THEN_FETCH); // make sure DF is consistent SearchResponse rsp = req.get(); assertSearchResponse(rsp); diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/80_date_math_index_names.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/80_date_math_index_names.yaml new file mode 100644 index 00000000000..233b41c6cf7 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/80_date_math_index_names.yaml @@ -0,0 +1,7 @@ +--- +"Missing index with catch": + + - do: + catch: /index=logstash-\d{4}\.\d{2}\.\d{2}/ + search: + index: