Merge branch 'master' into immutable_set_be_gone

Nik Everett committed 2015-09-25 09:09:24 -04:00
commit 5949b83115
471 changed files with 25508 additions and 13852 deletions

Vagrantfile

@@ -83,6 +83,10 @@ Vagrant.configure(2) do |config|
# the elasticsearch project called vagrant....
config.vm.synced_folder ".", "/vagrant", disabled: true
config.vm.synced_folder ".", "/elasticsearch"
config.vm.provider "virtualbox" do |v|
# Give the boxes 2GB so they can run our tests if they have to.
v.memory = 2048
end
if Vagrant.has_plugin?("vagrant-cachier")
config.cache.scope = :box
end


@@ -76,6 +76,10 @@ public class ExtendedCommonTermsQuery extends CommonTermsQuery {
return lowFreqMinNumShouldMatchSpec;
}
public float getMaxTermFrequency() {
return this.maxTermFrequency;
}
@Override
protected Query newTermQuery(Term term, TermContext context) {
if (fieldType == null) {
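The hunk above adds a simple accessor. As a hedged sketch (the import path is assumed from the source layout), a caller could read the cutoff back like this:

import org.apache.lucene.queries.ExtendedCommonTermsQuery;

class MaxTermFrequencyExample {
    // Hedged sketch: read back the frequency cutoff that splits terms into
    // the high- and low-frequency clause groups of a common-terms query.
    static float cutoff(ExtendedCommonTermsQuery query) {
        return query.getMaxTermFrequency();
    }
}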


@@ -21,8 +21,8 @@ package org.apache.lucene.queryparser.classic;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.index.query.ExistsQueryParser;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.ExistsQueryBuilder;
import org.elasticsearch.index.query.QueryShardContext;
/**
*
@@ -32,7 +32,7 @@ public class ExistsFieldQueryExtension implements FieldQueryExtension {
public static final String NAME = "_exists_";
@Override
public Query query(QueryParseContext parseContext, String queryText) {
return new ConstantScoreQuery(ExistsQueryParser.newFilter(parseContext, queryText, null));
public Query query(QueryShardContext context, String queryText) {
return new ConstantScoreQuery(ExistsQueryBuilder.newFilter(context, queryText));
}
}


@@ -20,12 +20,12 @@
package org.apache.lucene.queryparser.classic;
import org.apache.lucene.search.Query;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext;
/**
*
*/
public interface FieldQueryExtension {
Query query(QueryParseContext parseContext, String queryText);
Query query(QueryShardContext context, String queryText);
}
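Every implementer now has to adopt the QueryShardContext signature. A minimal hedged sketch of a hypothetical extension under the new contract (the class name and match-all behavior are illustrative, not part of this commit):

import org.apache.lucene.queryparser.classic.FieldQueryExtension;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.index.query.QueryShardContext;

// Hypothetical extension: ignores the query text and matches all documents.
public class MatchAllFieldQueryExtension implements FieldQueryExtension {
    @Override
    public Query query(QueryShardContext context, String queryText) {
        return new MatchAllDocsQuery();
    }
}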


@@ -19,31 +19,21 @@
package org.apache.lucene.queryparser.classic;
import com.google.common.collect.ImmutableMap;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.DisjunctionMaxQuery;
import org.apache.lucene.search.FilteredQuery;
import org.apache.lucene.search.FuzzyQuery;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.MultiPhraseQuery;
import org.apache.lucene.search.PhraseQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.WildcardQuery;
import org.apache.lucene.util.Version;
import org.apache.lucene.search.*;
import org.apache.lucene.util.automaton.RegExp;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.core.DateFieldMapper;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.support.QueryParsers;
import com.google.common.collect.ImmutableMap;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
@@ -70,53 +60,27 @@ public class MapperQueryParser extends QueryParser {
.build();
}
private final QueryParseContext parseContext;
private final QueryShardContext context;
private QueryParserSettings settings;
private Analyzer quoteAnalyzer;
private boolean forcedAnalyzer;
private boolean forcedQuoteAnalyzer;
private MappedFieldType currentFieldType;
private boolean analyzeWildcard;
private String quoteFieldSuffix;
public MapperQueryParser(QueryParseContext parseContext) {
public MapperQueryParser(QueryShardContext context) {
super(null, null);
this.parseContext = parseContext;
this.context = context;
}
public void reset(QueryParserSettings settings) {
this.settings = settings;
this.field = settings.defaultField();
if (settings.fields() != null) {
if (settings.fields.size() == 1) {
// just mark it as the default field
this.field = settings.fields().get(0);
} else {
// otherwise, we need to have the default field being null...
this.field = null;
}
}
this.forcedAnalyzer = settings.forcedAnalyzer() != null;
this.setAnalyzer(forcedAnalyzer ? settings.forcedAnalyzer() : settings.defaultAnalyzer());
if (settings.forcedQuoteAnalyzer() != null) {
this.forcedQuoteAnalyzer = true;
this.quoteAnalyzer = settings.forcedQuoteAnalyzer();
} else if (forcedAnalyzer) {
this.forcedQuoteAnalyzer = true;
this.quoteAnalyzer = settings.forcedAnalyzer();
if (settings.fieldsAndWeights().isEmpty()) {
this.field = settings.defaultField();
} else if (settings.fieldsAndWeights().size() == 1) {
this.field = settings.fieldsAndWeights().keySet().iterator().next();
} else {
this.forcedAnalyzer = false;
this.quoteAnalyzer = settings.defaultQuoteAnalyzer();
this.field = null;
}
this.quoteFieldSuffix = settings.quoteFieldSuffix();
setAnalyzer(settings.analyzer());
setMultiTermRewriteMethod(settings.rewriteMethod());
setEnablePositionIncrements(settings.enablePositionIncrements());
setAutoGeneratePhraseQueries(settings.autoGeneratePhraseQueries());
@@ -125,10 +89,9 @@ public class MapperQueryParser extends QueryParser {
setLowercaseExpandedTerms(settings.lowercaseExpandedTerms());
setPhraseSlop(settings.phraseSlop());
setDefaultOperator(settings.defaultOperator());
setFuzzyMinSim(settings.getFuzziness().asFloat());
setFuzzyMinSim(settings.fuzziness().asFloat());
setFuzzyPrefixLength(settings.fuzzyPrefixLength());
setLocale(settings.locale());
this.analyzeWildcard = settings.analyzeWildcard();
}
/**
@@ -162,7 +125,7 @@ public class MapperQueryParser extends QueryParser {
public Query getFieldQuery(String field, String queryText, boolean quoted) throws ParseException {
FieldQueryExtension fieldQueryExtension = fieldQueryExtensions.get(field);
if (fieldQueryExtension != null) {
return fieldQueryExtension.query(parseContext, queryText);
return fieldQueryExtension.query(context, queryText);
}
Collection<String> fields = extractMultiFields(field);
if (fields != null) {
@@ -224,29 +187,29 @@
Analyzer oldAnalyzer = getAnalyzer();
try {
if (quoted) {
setAnalyzer(quoteAnalyzer);
if (quoteFieldSuffix != null) {
currentFieldType = parseContext.fieldMapper(field + quoteFieldSuffix);
setAnalyzer(settings.quoteAnalyzer());
if (settings.quoteFieldSuffix() != null) {
currentFieldType = context.fieldMapper(field + settings.quoteFieldSuffix());
}
}
if (currentFieldType == null) {
currentFieldType = parseContext.fieldMapper(field);
currentFieldType = context.fieldMapper(field);
}
if (currentFieldType != null) {
if (quoted) {
if (!forcedQuoteAnalyzer) {
setAnalyzer(parseContext.getSearchQuoteAnalyzer(currentFieldType));
if (!settings.forceQuoteAnalyzer()) {
setAnalyzer(context.getSearchQuoteAnalyzer(currentFieldType));
}
} else {
if (!forcedAnalyzer) {
setAnalyzer(parseContext.getSearchAnalyzer(currentFieldType));
if (!settings.forceAnalyzer()) {
setAnalyzer(context.getSearchAnalyzer(currentFieldType));
}
}
if (currentFieldType != null) {
Query query = null;
if (currentFieldType.useTermQueryWithQueryString()) {
try {
query = currentFieldType.termQuery(queryText, parseContext);
query = currentFieldType.termQuery(queryText, context);
} catch (RuntimeException e) {
if (settings.lenient()) {
return null;
@@ -357,7 +320,7 @@
}
private Query getRangeQuerySingle(String field, String part1, String part2, boolean startInclusive, boolean endInclusive) {
currentFieldType = parseContext.fieldMapper(field);
currentFieldType = context.fieldMapper(field);
if (currentFieldType != null) {
if (lowercaseExpandedTerms && !currentFieldType.isNumeric()) {
part1 = part1 == null ? null : part1.toLowerCase(locale);
@@ -422,7 +385,7 @@
}
private Query getFuzzyQuerySingle(String field, String termStr, String minSimilarity) throws ParseException {
currentFieldType = parseContext.fieldMapper(field);
currentFieldType = context.fieldMapper(field);
if (currentFieldType != null) {
try {
return currentFieldType.fuzzyQuery(termStr, Fuzziness.build(minSimilarity), fuzzyPrefixLength, settings.fuzzyMaxExpansions(), FuzzyQuery.defaultTranspositions);
@@ -492,14 +455,14 @@
currentFieldType = null;
Analyzer oldAnalyzer = getAnalyzer();
try {
currentFieldType = parseContext.fieldMapper(field);
currentFieldType = context.fieldMapper(field);
if (currentFieldType != null) {
if (!forcedAnalyzer) {
setAnalyzer(parseContext.getSearchAnalyzer(currentFieldType));
if (!settings.forceAnalyzer()) {
setAnalyzer(context.getSearchAnalyzer(currentFieldType));
}
Query query = null;
if (currentFieldType.useTermQueryWithQueryString()) {
query = currentFieldType.prefixQuery(termStr, multiTermRewriteMethod, parseContext);
query = currentFieldType.prefixQuery(termStr, multiTermRewriteMethod, context);
}
if (query == null) {
query = getPossiblyAnalyzedPrefixQuery(currentFieldType.names().indexName(), termStr);
@@ -518,7 +481,7 @@
}
private Query getPossiblyAnalyzedPrefixQuery(String field, String termStr) throws ParseException {
if (!analyzeWildcard) {
if (!settings.analyzeWildcard()) {
return super.getPrefixQuery(field, termStr);
}
// get Analyzer from superclass and tokenize the term
@@ -556,16 +519,7 @@
clauses.add(new BooleanClause(super.getPrefixQuery(field, token), BooleanClause.Occur.SHOULD));
}
return getBooleanQuery(clauses, true);
//return super.getPrefixQuery(field, termStr);
/* this means that the analyzer used either added or consumed
* (common for a stemmer) tokens, and we can't build a PrefixQuery */
// throw new ParseException("Cannot build PrefixQuery with analyzer "
// + getAnalyzer().getClass()
// + (tlist.size() > 1 ? " - token(s) added" : " - token consumed"));
}
}
@Override
@@ -584,7 +538,7 @@ public class MapperQueryParser extends QueryParser {
return newMatchAllDocsQuery();
}
// effectively, we check if a field exists or not
return fieldQueryExtensions.get(ExistsFieldQueryExtension.NAME).query(parseContext, actualField);
return fieldQueryExtensions.get(ExistsFieldQueryExtension.NAME).query(context, actualField);
}
}
if (lowercaseExpandedTerms) {
@@ -633,10 +587,10 @@
currentFieldType = null;
Analyzer oldAnalyzer = getAnalyzer();
try {
currentFieldType = parseContext.fieldMapper(field);
currentFieldType = context.fieldMapper(field);
if (currentFieldType != null) {
if (!forcedAnalyzer) {
setAnalyzer(parseContext.getSearchAnalyzer(currentFieldType));
if (!settings.forceAnalyzer()) {
setAnalyzer(context.getSearchAnalyzer(currentFieldType));
}
indexedNameField = currentFieldType.names().indexName();
return getPossiblyAnalyzedWildcardQuery(indexedNameField, termStr);
@@ -653,7 +607,7 @@
}
private Query getPossiblyAnalyzedWildcardQuery(String field, String termStr) throws ParseException {
if (!analyzeWildcard) {
if (!settings.analyzeWildcard()) {
return super.getWildcardQuery(field, termStr);
}
boolean isWithinToken = (!termStr.startsWith("?") && !termStr.startsWith("*"));
@@ -765,14 +719,14 @@
currentFieldType = null;
Analyzer oldAnalyzer = getAnalyzer();
try {
currentFieldType = parseContext.fieldMapper(field);
currentFieldType = context.fieldMapper(field);
if (currentFieldType != null) {
if (!forcedAnalyzer) {
setAnalyzer(parseContext.getSearchAnalyzer(currentFieldType));
if (!settings.forceAnalyzer()) {
setAnalyzer(context.getSearchAnalyzer(currentFieldType));
}
Query query = null;
if (currentFieldType.useTermQueryWithQueryString()) {
query = currentFieldType.regexpQuery(termStr, RegExp.ALL, maxDeterminizedStates, multiTermRewriteMethod, parseContext);
query = currentFieldType.regexpQuery(termStr, RegExp.ALL, maxDeterminizedStates, multiTermRewriteMethod, context);
}
if (query == null) {
query = super.getRegexpQuery(field, termStr);
@@ -800,9 +754,9 @@
}
private void applyBoost(String field, Query q) {
if (settings.boosts() != null) {
float boost = settings.boosts().getOrDefault(field, 1f);
q.setBoost(boost);
Float fieldBoost = settings.fieldsAndWeights().get(field);
if (fieldBoost != null) {
q.setBoost(fieldBoost);
}
}
@@ -828,11 +782,11 @@
}
private Collection<String> extractMultiFields(String field) {
Collection<String> fields = null;
Collection<String> fields;
if (field != null) {
fields = parseContext.simpleMatchToIndexNames(field);
fields = context.simpleMatchToIndexNames(field);
} else {
fields = settings.fields();
fields = settings.fieldsAndWeights().keySet();
}
return fields;
}
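For context on the fieldsAndWeights() calls above: the old fields() list and boosts() map collapse into a single Map<String, Float>, whose key set is the field list and whose values are the per-field boosts. A hedged sketch of caller-side setup (field names and weights are illustrative):

import java.util.HashMap;
import java.util.Map;
import org.apache.lucene.queryparser.classic.QueryParserSettings;

class FieldsAndWeightsExample {
    // Hedged sketch: one map now carries both the field list (keySet) and the
    // per-field boosts (values); a missing entry means "no explicit boost".
    static void configure(QueryParserSettings settings) {
        Map<String, Float> fieldsAndWeights = new HashMap<>();
        fieldsAndWeights.put("title", 2.0f); // boosted field
        fieldsAndWeights.put("body", 1.0f);  // default weight
        settings.fieldsAndWeights(fieldsAndWeights);
    }
}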


@@ -21,8 +21,8 @@ package org.apache.lucene.queryparser.classic;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.index.query.MissingQueryParser;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.MissingQueryBuilder;
import org.elasticsearch.index.query.QueryShardContext;
/**
*
@@ -32,8 +32,11 @@ public class MissingFieldQueryExtension implements FieldQueryExtension {
public static final String NAME = "_missing_";
@Override
public Query query(QueryParseContext parseContext, String queryText) {
return new ConstantScoreQuery(MissingQueryParser.newFilter(parseContext, queryText,
MissingQueryParser.DEFAULT_EXISTENCE_VALUE, MissingQueryParser.DEFAULT_NULL_VALUE, null));
public Query query(QueryShardContext context, String queryText) {
Query query = MissingQueryBuilder.newFilter(context, queryText, MissingQueryBuilder.DEFAULT_EXISTENCE_VALUE, MissingQueryBuilder.DEFAULT_NULL_VALUE);
if (query != null) {
return new ConstantScoreQuery(query);
}
return null;
}
}


@@ -19,66 +19,74 @@
package org.apache.lucene.queryparser.classic;
import com.carrotsearch.hppc.ObjectFloatHashMap;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.search.FuzzyQuery;
import org.apache.lucene.search.MultiTermQuery;
import org.apache.lucene.util.automaton.Operations;
import org.elasticsearch.common.unit.Fuzziness;
import org.joda.time.DateTimeZone;
import java.util.List;
import java.util.Locale;
import java.util.Map;
/**
*
* Encapsulates settings that affect query_string parsing via {@link MapperQueryParser}
*/
public class QueryParserSettings {
public static final boolean DEFAULT_ALLOW_LEADING_WILDCARD = true;
public static final boolean DEFAULT_ANALYZE_WILDCARD = false;
public static final float DEFAULT_BOOST = 1.f;
private final String queryString;
private String queryString;
private String defaultField;
private float boost = DEFAULT_BOOST;
private MapperQueryParser.Operator defaultOperator = QueryParser.Operator.OR;
private boolean autoGeneratePhraseQueries = false;
private boolean allowLeadingWildcard = DEFAULT_ALLOW_LEADING_WILDCARD;
private boolean lowercaseExpandedTerms = true;
private boolean enablePositionIncrements = true;
private int phraseSlop = 0;
private Fuzziness fuzziness = Fuzziness.AUTO;
private int fuzzyPrefixLength = FuzzyQuery.defaultPrefixLength;
private int fuzzyMaxExpansions = FuzzyQuery.defaultMaxExpansions;
private int maxDeterminizedStates = Operations.DEFAULT_MAX_DETERMINIZED_STATES;
private MultiTermQuery.RewriteMethod fuzzyRewriteMethod = null;
private boolean analyzeWildcard = DEFAULT_ANALYZE_WILDCARD;
private boolean escape = false;
private Analyzer defaultAnalyzer = null;
private Analyzer defaultQuoteAnalyzer = null;
private Analyzer forcedAnalyzer = null;
private Analyzer forcedQuoteAnalyzer = null;
private String quoteFieldSuffix = null;
private MultiTermQuery.RewriteMethod rewriteMethod = MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE;
private String minimumShouldMatch;
private boolean lenient;
private Map<String, Float> fieldsAndWeights;
private QueryParser.Operator defaultOperator;
private Analyzer analyzer;
private boolean forceAnalyzer;
private Analyzer quoteAnalyzer;
private boolean forceQuoteAnalyzer;
private String quoteFieldSuffix;
private boolean autoGeneratePhraseQueries;
private boolean allowLeadingWildcard;
private boolean analyzeWildcard;
private boolean lowercaseExpandedTerms;
private boolean enablePositionIncrements;
private Locale locale;
private Fuzziness fuzziness;
private int fuzzyPrefixLength;
private int fuzzyMaxExpansions;
private MultiTermQuery.RewriteMethod fuzzyRewriteMethod;
private int phraseSlop;
private boolean useDisMax;
private float tieBreaker;
private MultiTermQuery.RewriteMethod rewriteMethod;
private boolean lenient;
private DateTimeZone timeZone;
List<String> fields = null;
ObjectFloatHashMap<String> boosts = null;
float tieBreaker = 0.0f;
boolean useDisMax = true;
/** To limit effort spent determinizing regexp queries. */
private int maxDeterminizedStates;
public QueryParserSettings(String queryString) {
this.queryString = queryString;
}
public String queryString() {
return queryString;
}
public void queryString(String queryString) {
this.queryString = queryString;
}
public String defaultField() {
return defaultField;
}
@@ -87,12 +95,12 @@ public class QueryParserSettings {
this.defaultField = defaultField;
}
public float boost() {
return boost;
public Map<String, Float> fieldsAndWeights() {
return fieldsAndWeights;
}
public void boost(float boost) {
this.boost = boost;
public void fieldsAndWeights(Map<String, Float> fieldsAndWeights) {
this.fieldsAndWeights = fieldsAndWeights;
}
public QueryParser.Operator defaultOperator() {
@@ -175,44 +183,40 @@ public class QueryParserSettings {
this.fuzzyRewriteMethod = fuzzyRewriteMethod;
}
public boolean escape() {
return escape;
public void defaultAnalyzer(Analyzer analyzer) {
this.analyzer = analyzer;
this.forceAnalyzer = false;
}
public void escape(boolean escape) {
this.escape = escape;
public void forceAnalyzer(Analyzer analyzer) {
this.analyzer = analyzer;
this.forceAnalyzer = true;
}
public Analyzer defaultAnalyzer() {
return defaultAnalyzer;
public Analyzer analyzer() {
return analyzer;
}
public void defaultAnalyzer(Analyzer defaultAnalyzer) {
this.defaultAnalyzer = defaultAnalyzer;
public boolean forceAnalyzer() {
return forceAnalyzer;
}
public Analyzer defaultQuoteAnalyzer() {
return defaultQuoteAnalyzer;
public void defaultQuoteAnalyzer(Analyzer quoteAnalyzer) {
this.quoteAnalyzer = quoteAnalyzer;
this.forceQuoteAnalyzer = false;
}
public void defaultQuoteAnalyzer(Analyzer defaultAnalyzer) {
this.defaultQuoteAnalyzer = defaultAnalyzer;
public void forceQuoteAnalyzer(Analyzer quoteAnalyzer) {
this.quoteAnalyzer = quoteAnalyzer;
this.forceQuoteAnalyzer = true;
}
public Analyzer forcedAnalyzer() {
return forcedAnalyzer;
public Analyzer quoteAnalyzer() {
return quoteAnalyzer;
}
public void forcedAnalyzer(Analyzer forcedAnalyzer) {
this.forcedAnalyzer = forcedAnalyzer;
}
public Analyzer forcedQuoteAnalyzer() {
return forcedQuoteAnalyzer;
}
public void forcedQuoteAnalyzer(Analyzer forcedAnalyzer) {
this.forcedQuoteAnalyzer = forcedAnalyzer;
public boolean forceQuoteAnalyzer() {
return forceQuoteAnalyzer;
}
public boolean analyzeWildcard() {
@@ -231,14 +235,6 @@ public class QueryParserSettings {
this.rewriteMethod = rewriteMethod;
}
public String minimumShouldMatch() {
return this.minimumShouldMatch;
}
public void minimumShouldMatch(String minimumShouldMatch) {
this.minimumShouldMatch = minimumShouldMatch;
}
public void quoteFieldSuffix(String quoteFieldSuffix) {
this.quoteFieldSuffix = quoteFieldSuffix;
}
@@ -255,22 +251,6 @@ public class QueryParserSettings {
return this.lenient;
}
public List<String> fields() {
return fields;
}
public void fields(List<String> fields) {
this.fields = fields;
}
public ObjectFloatHashMap<String> boosts() {
return boosts;
}
public void boosts(ObjectFloatHashMap<String> boosts) {
this.boosts = boosts;
}
public float tieBreaker() {
return tieBreaker;
}
@@ -303,97 +283,11 @@ public class QueryParserSettings {
return this.timeZone;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
QueryParserSettings that = (QueryParserSettings) o;
if (autoGeneratePhraseQueries != that.autoGeneratePhraseQueries()) return false;
if (maxDeterminizedStates != that.maxDeterminizedStates()) return false;
if (allowLeadingWildcard != that.allowLeadingWildcard) return false;
if (Float.compare(that.boost, boost) != 0) return false;
if (enablePositionIncrements != that.enablePositionIncrements) return false;
if (escape != that.escape) return false;
if (analyzeWildcard != that.analyzeWildcard) return false;
if (fuzziness != null ? fuzziness.equals(that.fuzziness) == false : fuzziness != null) return false;
if (fuzzyPrefixLength != that.fuzzyPrefixLength) return false;
if (fuzzyMaxExpansions != that.fuzzyMaxExpansions) return false;
if (fuzzyRewriteMethod != null ? !fuzzyRewriteMethod.equals(that.fuzzyRewriteMethod) : that.fuzzyRewriteMethod != null)
return false;
if (lowercaseExpandedTerms != that.lowercaseExpandedTerms) return false;
if (phraseSlop != that.phraseSlop) return false;
if (defaultAnalyzer != null ? !defaultAnalyzer.equals(that.defaultAnalyzer) : that.defaultAnalyzer != null)
return false;
if (defaultQuoteAnalyzer != null ? !defaultQuoteAnalyzer.equals(that.defaultQuoteAnalyzer) : that.defaultQuoteAnalyzer != null)
return false;
if (forcedAnalyzer != null ? !forcedAnalyzer.equals(that.forcedAnalyzer) : that.forcedAnalyzer != null)
return false;
if (forcedQuoteAnalyzer != null ? !forcedQuoteAnalyzer.equals(that.forcedQuoteAnalyzer) : that.forcedQuoteAnalyzer != null)
return false;
if (defaultField != null ? !defaultField.equals(that.defaultField) : that.defaultField != null) return false;
if (defaultOperator != that.defaultOperator) return false;
if (queryString != null ? !queryString.equals(that.queryString) : that.queryString != null) return false;
if (rewriteMethod != null ? !rewriteMethod.equals(that.rewriteMethod) : that.rewriteMethod != null)
return false;
if (minimumShouldMatch != null ? !minimumShouldMatch.equals(that.minimumShouldMatch) : that.minimumShouldMatch != null)
return false;
if (quoteFieldSuffix != null ? !quoteFieldSuffix.equals(that.quoteFieldSuffix) : that.quoteFieldSuffix != null)
return false;
if (lenient != that.lenient) {
return false;
}
if (locale != null ? !locale.equals(that.locale) : that.locale != null) {
return false;
}
if (timeZone != null ? !timeZone.equals(that.timeZone) : that.timeZone != null) {
return false;
}
if (Float.compare(that.tieBreaker, tieBreaker) != 0) return false;
if (useDisMax != that.useDisMax) return false;
if (boosts != null ? !boosts.equals(that.boosts) : that.boosts != null) return false;
if (fields != null ? !fields.equals(that.fields) : that.fields != null) return false;
return true;
}
@Override
public int hashCode() {
int result = queryString != null ? queryString.hashCode() : 0;
result = 31 * result + (defaultField != null ? defaultField.hashCode() : 0);
result = 31 * result + (boost != +0.0f ? Float.floatToIntBits(boost) : 0);
result = 31 * result + (defaultOperator != null ? defaultOperator.hashCode() : 0);
result = 31 * result + (autoGeneratePhraseQueries ? 1 : 0);
result = 31 * result + maxDeterminizedStates;
result = 31 * result + (allowLeadingWildcard ? 1 : 0);
result = 31 * result + (lowercaseExpandedTerms ? 1 : 0);
result = 31 * result + (enablePositionIncrements ? 1 : 0);
result = 31 * result + phraseSlop;
result = 31 * result + (fuzziness.hashCode());
result = 31 * result + fuzzyPrefixLength;
result = 31 * result + (escape ? 1 : 0);
result = 31 * result + (defaultAnalyzer != null ? defaultAnalyzer.hashCode() : 0);
result = 31 * result + (defaultQuoteAnalyzer != null ? defaultQuoteAnalyzer.hashCode() : 0);
result = 31 * result + (forcedAnalyzer != null ? forcedAnalyzer.hashCode() : 0);
result = 31 * result + (forcedQuoteAnalyzer != null ? forcedQuoteAnalyzer.hashCode() : 0);
result = 31 * result + (analyzeWildcard ? 1 : 0);
result = 31 * result + (fields != null ? fields.hashCode() : 0);
result = 31 * result + (boosts != null ? boosts.hashCode() : 0);
result = 31 * result + (tieBreaker != +0.0f ? Float.floatToIntBits(tieBreaker) : 0);
result = 31 * result + (useDisMax ? 1 : 0);
result = 31 * result + (locale != null ? locale.hashCode() : 0);
result = 31 * result + (timeZone != null ? timeZone.hashCode() : 0);
return result;
}
public void setFuzziness(Fuzziness fuzziness) {
public void fuzziness(Fuzziness fuzziness) {
this.fuzziness = fuzziness;
}
public Fuzziness getFuzziness() {
public Fuzziness fuzziness() {
return fuzziness;
}
}
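The forced-analyzer state now lives in the settings object as one analyzer plus a boolean, set as a side effect of which setter is called. A hedged sketch of the intended call pattern (the analyzer choice is a placeholder):

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
import org.apache.lucene.queryparser.classic.QueryParserSettings;

class AnalyzerSettingsExample {
    // Hedged sketch: defaultAnalyzer(...) leaves forceAnalyzer() false, so the
    // parser may still swap in a field's search analyzer; forceAnalyzer(...)
    // pins the analyzer for every field.
    static void configure(QueryParserSettings settings, boolean pin) {
        Analyzer analyzer = new WhitespaceAnalyzer(); // placeholder choice
        if (pin) {
            settings.forceAnalyzer(analyzer);
        } else {
            settings.defaultAnalyzer(analyzer);
        }
    }
}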


@@ -463,153 +463,151 @@ public class ElasticsearchException extends RuntimeException implements ToXConte
// change due to refactorings etc. like renaming we have to keep the ordinal <--> class mapping
// to deserialize the exception coming from another node or from a corruption marker on
// a corrupted index.
// NOTE: ONLY APPEND TO THE END and NEVER REMOVE EXCEPTIONS IN MINOR VERSIONS
final Map<Class<? extends ElasticsearchException>, Integer> exceptions = new HashMap<>();
exceptions.put(org.elasticsearch.index.snapshots.IndexShardSnapshotFailedException.class, 0);
exceptions.put(org.elasticsearch.search.dfs.DfsPhaseExecutionException.class, 1);
exceptions.put(org.elasticsearch.common.util.CancellableThreads.ExecutionCancelledException.class, 2);
exceptions.put(org.elasticsearch.discovery.MasterNotDiscoveredException.class, 3);
exceptions.put(org.elasticsearch.ElasticsearchSecurityException.class, 4);
exceptions.put(org.elasticsearch.index.snapshots.IndexShardRestoreException.class, 5);
exceptions.put(org.elasticsearch.indices.IndexClosedException.class, 6);
exceptions.put(org.elasticsearch.http.BindHttpException.class, 7);
exceptions.put(org.elasticsearch.action.search.ReduceSearchPhaseException.class, 8);
exceptions.put(org.elasticsearch.node.NodeClosedException.class, 9);
exceptions.put(org.elasticsearch.index.engine.SnapshotFailedEngineException.class, 10);
exceptions.put(org.elasticsearch.index.shard.ShardNotFoundException.class, 11);
exceptions.put(org.elasticsearch.transport.ConnectTransportException.class, 12);
exceptions.put(org.elasticsearch.transport.NotSerializableTransportException.class, 13);
exceptions.put(org.elasticsearch.transport.ResponseHandlerFailureTransportException.class, 14);
exceptions.put(org.elasticsearch.indices.IndexCreationException.class, 15);
exceptions.put(org.elasticsearch.index.IndexNotFoundException.class, 16);
exceptions.put(org.elasticsearch.cluster.routing.IllegalShardRoutingStateException.class, 17);
exceptions.put(org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException.class, 18);
exceptions.put(org.elasticsearch.ResourceNotFoundException.class, 19);
exceptions.put(org.elasticsearch.transport.ActionTransportException.class, 20);
exceptions.put(org.elasticsearch.ElasticsearchGenerationException.class, 21);
exceptions.put(org.elasticsearch.index.engine.CreateFailedEngineException.class, 22);
exceptions.put(org.elasticsearch.index.shard.IndexShardStartedException.class, 23);
exceptions.put(org.elasticsearch.search.SearchContextMissingException.class, 24);
exceptions.put(org.elasticsearch.script.ScriptException.class, 25);
exceptions.put(org.elasticsearch.index.shard.TranslogRecoveryPerformer.BatchOperationException.class, 26);
exceptions.put(org.elasticsearch.snapshots.SnapshotCreationException.class, 27);
exceptions.put(org.elasticsearch.index.engine.DeleteFailedEngineException.class, 28);
exceptions.put(org.elasticsearch.index.engine.DocumentMissingException.class, 29);
exceptions.put(org.elasticsearch.snapshots.SnapshotException.class, 30);
exceptions.put(org.elasticsearch.indices.InvalidAliasNameException.class, 31);
exceptions.put(org.elasticsearch.indices.InvalidIndexNameException.class, 32);
exceptions.put(org.elasticsearch.indices.IndexPrimaryShardNotAllocatedException.class, 33);
exceptions.put(org.elasticsearch.transport.TransportException.class, 34);
exceptions.put(org.elasticsearch.ElasticsearchParseException.class, 35);
exceptions.put(org.elasticsearch.search.SearchException.class, 36);
exceptions.put(org.elasticsearch.index.mapper.MapperException.class, 37);
exceptions.put(org.elasticsearch.indices.InvalidTypeNameException.class, 38);
exceptions.put(org.elasticsearch.snapshots.SnapshotRestoreException.class, 39);
exceptions.put(org.elasticsearch.common.ParsingException.class, 40);
exceptions.put(org.elasticsearch.index.shard.IndexShardClosedException.class, 41);
exceptions.put(org.elasticsearch.script.expression.ExpressionScriptCompilationException.class, 42);
exceptions.put(org.elasticsearch.indices.recovery.RecoverFilesRecoveryException.class, 43);
exceptions.put(org.elasticsearch.index.translog.TruncatedTranslogException.class, 44);
exceptions.put(org.elasticsearch.indices.recovery.RecoveryFailedException.class, 45);
exceptions.put(org.elasticsearch.index.shard.IndexShardRelocatedException.class, 46);
exceptions.put(org.elasticsearch.transport.NodeShouldNotConnectException.class, 47);
exceptions.put(org.elasticsearch.indices.IndexTemplateAlreadyExistsException.class, 48);
exceptions.put(org.elasticsearch.index.translog.TranslogCorruptedException.class, 49);
exceptions.put(org.elasticsearch.cluster.block.ClusterBlockException.class, 50);
exceptions.put(org.elasticsearch.search.fetch.FetchPhaseExecutionException.class, 51);
exceptions.put(org.elasticsearch.index.IndexShardAlreadyExistsException.class, 52);
exceptions.put(org.elasticsearch.index.engine.VersionConflictEngineException.class, 53);
exceptions.put(org.elasticsearch.index.engine.EngineException.class, 54);
exceptions.put(org.elasticsearch.index.engine.DocumentAlreadyExistsException.class, 55);
exceptions.put(org.elasticsearch.action.NoSuchNodeException.class, 56);
exceptions.put(org.elasticsearch.common.settings.SettingsException.class, 57);
exceptions.put(org.elasticsearch.indices.IndexTemplateMissingException.class, 58);
exceptions.put(org.elasticsearch.transport.SendRequestTransportException.class, 59);
exceptions.put(org.elasticsearch.common.util.concurrent.EsRejectedExecutionException.class, 60);
exceptions.put(org.elasticsearch.common.lucene.Lucene.EarlyTerminationException.class, 61);
exceptions.put(org.elasticsearch.cluster.routing.RoutingValidationException.class, 62);
exceptions.put(org.elasticsearch.common.io.stream.NotSerializableExceptionWrapper.class, 63);
exceptions.put(org.elasticsearch.indices.AliasFilterParsingException.class, 64);
exceptions.put(org.elasticsearch.index.engine.DeleteByQueryFailedEngineException.class, 65);
exceptions.put(org.elasticsearch.gateway.GatewayException.class, 66);
exceptions.put(org.elasticsearch.index.shard.IndexShardNotRecoveringException.class, 67);
exceptions.put(org.elasticsearch.http.HttpException.class, 68);
exceptions.put(org.elasticsearch.ElasticsearchException.class, 69);
exceptions.put(org.elasticsearch.snapshots.SnapshotMissingException.class, 70);
exceptions.put(org.elasticsearch.action.PrimaryMissingActionException.class, 71);
exceptions.put(org.elasticsearch.action.FailedNodeException.class, 72);
exceptions.put(org.elasticsearch.search.SearchParseException.class, 73);
exceptions.put(org.elasticsearch.snapshots.ConcurrentSnapshotExecutionException.class, 74);
exceptions.put(org.elasticsearch.common.blobstore.BlobStoreException.class, 75);
exceptions.put(org.elasticsearch.cluster.IncompatibleClusterStateVersionException.class, 76);
exceptions.put(org.elasticsearch.index.engine.RecoveryEngineException.class, 77);
exceptions.put(org.elasticsearch.common.util.concurrent.UncategorizedExecutionException.class, 78);
exceptions.put(org.elasticsearch.action.TimestampParsingException.class, 79);
exceptions.put(org.elasticsearch.action.RoutingMissingException.class, 80);
exceptions.put(org.elasticsearch.index.engine.IndexFailedEngineException.class, 81);
exceptions.put(org.elasticsearch.index.snapshots.IndexShardRestoreFailedException.class, 82);
exceptions.put(org.elasticsearch.repositories.RepositoryException.class, 83);
exceptions.put(org.elasticsearch.transport.ReceiveTimeoutTransportException.class, 84);
exceptions.put(org.elasticsearch.transport.NodeDisconnectedException.class, 85);
exceptions.put(org.elasticsearch.index.AlreadyExpiredException.class, 86);
exceptions.put(org.elasticsearch.search.aggregations.AggregationExecutionException.class, 87);
exceptions.put(org.elasticsearch.index.mapper.MergeMappingException.class, 88);
exceptions.put(org.elasticsearch.indices.InvalidIndexTemplateException.class, 89);
exceptions.put(org.elasticsearch.percolator.PercolateException.class, 90);
exceptions.put(org.elasticsearch.index.engine.RefreshFailedEngineException.class, 91);
exceptions.put(org.elasticsearch.search.aggregations.AggregationInitializationException.class, 92);
exceptions.put(org.elasticsearch.indices.recovery.DelayRecoveryException.class, 93);
exceptions.put(org.elasticsearch.search.warmer.IndexWarmerMissingException.class, 94);
exceptions.put(org.elasticsearch.client.transport.NoNodeAvailableException.class, 95);
exceptions.put(org.elasticsearch.script.groovy.GroovyScriptCompilationException.class, 96);
exceptions.put(org.elasticsearch.snapshots.InvalidSnapshotNameException.class, 97);
exceptions.put(org.elasticsearch.index.shard.IllegalIndexShardStateException.class, 98);
exceptions.put(org.elasticsearch.index.snapshots.IndexShardSnapshotException.class, 99);
exceptions.put(org.elasticsearch.index.shard.IndexShardNotStartedException.class, 100);
exceptions.put(org.elasticsearch.action.search.SearchPhaseExecutionException.class, 101);
exceptions.put(org.elasticsearch.transport.ActionNotFoundTransportException.class, 102);
exceptions.put(org.elasticsearch.transport.TransportSerializationException.class, 103);
exceptions.put(org.elasticsearch.transport.RemoteTransportException.class, 104);
exceptions.put(org.elasticsearch.index.engine.EngineCreationFailureException.class, 105);
exceptions.put(org.elasticsearch.cluster.routing.RoutingException.class, 106);
exceptions.put(org.elasticsearch.index.shard.IndexShardRecoveryException.class, 107);
exceptions.put(org.elasticsearch.repositories.RepositoryMissingException.class, 108);
exceptions.put(org.elasticsearch.script.expression.ExpressionScriptExecutionException.class, 109);
exceptions.put(org.elasticsearch.index.percolator.PercolatorException.class, 110);
exceptions.put(org.elasticsearch.index.engine.DocumentSourceMissingException.class, 111);
exceptions.put(org.elasticsearch.index.engine.FlushNotAllowedEngineException.class, 112);
exceptions.put(org.elasticsearch.common.settings.NoClassSettingsException.class, 113);
exceptions.put(org.elasticsearch.transport.BindTransportException.class, 114);
exceptions.put(org.elasticsearch.rest.action.admin.indices.alias.delete.AliasesNotFoundException.class, 115);
exceptions.put(org.elasticsearch.index.shard.IndexShardRecoveringException.class, 116);
exceptions.put(org.elasticsearch.index.translog.TranslogException.class, 117);
exceptions.put(org.elasticsearch.cluster.metadata.ProcessClusterEventTimeoutException.class, 118);
exceptions.put(org.elasticsearch.action.support.replication.TransportReplicationAction.RetryOnPrimaryException.class, 119);
exceptions.put(org.elasticsearch.ElasticsearchTimeoutException.class, 120);
exceptions.put(org.elasticsearch.search.query.QueryPhaseExecutionException.class, 121);
exceptions.put(org.elasticsearch.repositories.RepositoryVerificationException.class, 122);
exceptions.put(org.elasticsearch.search.aggregations.InvalidAggregationPathException.class, 123);
exceptions.put(org.elasticsearch.script.groovy.GroovyScriptExecutionException.class, 124);
exceptions.put(org.elasticsearch.indices.IndexAlreadyExistsException.class, 125);
exceptions.put(org.elasticsearch.script.Script.ScriptParseException.class, 126);
exceptions.put(org.elasticsearch.transport.netty.SizeHeaderFrameDecoder.HttpOnTransportException.class, 127);
exceptions.put(org.elasticsearch.index.mapper.MapperParsingException.class, 128);
exceptions.put(org.elasticsearch.search.SearchContextException.class, 129);
exceptions.put(org.elasticsearch.search.builder.SearchSourceBuilderException.class, 130);
exceptions.put(org.elasticsearch.index.engine.EngineClosedException.class, 131);
exceptions.put(org.elasticsearch.action.NoShardAvailableActionException.class, 132);
exceptions.put(org.elasticsearch.action.UnavailableShardsException.class, 133);
exceptions.put(org.elasticsearch.index.engine.FlushFailedEngineException.class, 134);
exceptions.put(org.elasticsearch.common.breaker.CircuitBreakingException.class, 135);
exceptions.put(org.elasticsearch.transport.NodeNotConnectedException.class, 136);
exceptions.put(org.elasticsearch.index.mapper.StrictDynamicMappingException.class, 137);
exceptions.put(org.elasticsearch.action.support.replication.TransportReplicationAction.RetryOnReplicaException.class, 138);
exceptions.put(org.elasticsearch.indices.TypeMissingException.class, 139);
exceptions.put(org.elasticsearch.index.snapshots.IndexShardSnapshotFailedException.class, exceptions.size());
exceptions.put(org.elasticsearch.search.dfs.DfsPhaseExecutionException.class, exceptions.size());
exceptions.put(org.elasticsearch.common.util.CancellableThreads.ExecutionCancelledException.class, exceptions.size());
exceptions.put(org.elasticsearch.discovery.MasterNotDiscoveredException.class, exceptions.size());
exceptions.put(org.elasticsearch.ElasticsearchSecurityException.class, exceptions.size());
exceptions.put(org.elasticsearch.index.snapshots.IndexShardRestoreException.class, exceptions.size());
exceptions.put(org.elasticsearch.indices.IndexClosedException.class, exceptions.size());
exceptions.put(org.elasticsearch.http.BindHttpException.class, exceptions.size());
exceptions.put(org.elasticsearch.action.search.ReduceSearchPhaseException.class, exceptions.size());
exceptions.put(org.elasticsearch.node.NodeClosedException.class, exceptions.size());
exceptions.put(org.elasticsearch.index.engine.SnapshotFailedEngineException.class, exceptions.size());
exceptions.put(org.elasticsearch.index.shard.ShardNotFoundException.class, exceptions.size());
exceptions.put(org.elasticsearch.transport.ConnectTransportException.class, exceptions.size());
exceptions.put(org.elasticsearch.transport.NotSerializableTransportException.class, exceptions.size());
exceptions.put(org.elasticsearch.transport.ResponseHandlerFailureTransportException.class, exceptions.size());
exceptions.put(org.elasticsearch.indices.IndexCreationException.class, exceptions.size());
exceptions.put(org.elasticsearch.index.IndexNotFoundException.class, exceptions.size());
exceptions.put(org.elasticsearch.cluster.routing.IllegalShardRoutingStateException.class, exceptions.size());
exceptions.put(org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException.class, exceptions.size());
exceptions.put(org.elasticsearch.ResourceNotFoundException.class, exceptions.size());
exceptions.put(org.elasticsearch.transport.ActionTransportException.class, exceptions.size());
exceptions.put(org.elasticsearch.ElasticsearchGenerationException.class, exceptions.size());
exceptions.put(org.elasticsearch.index.engine.CreateFailedEngineException.class, exceptions.size());
exceptions.put(org.elasticsearch.index.shard.IndexShardStartedException.class, exceptions.size());
exceptions.put(org.elasticsearch.search.SearchContextMissingException.class, exceptions.size());
exceptions.put(org.elasticsearch.script.ScriptException.class, exceptions.size());
exceptions.put(org.elasticsearch.index.shard.TranslogRecoveryPerformer.BatchOperationException.class, exceptions.size());
exceptions.put(org.elasticsearch.snapshots.SnapshotCreationException.class, exceptions.size());
exceptions.put(org.elasticsearch.index.engine.DeleteFailedEngineException.class, exceptions.size());
exceptions.put(org.elasticsearch.index.engine.DocumentMissingException.class, exceptions.size());
exceptions.put(org.elasticsearch.snapshots.SnapshotException.class, exceptions.size());
exceptions.put(org.elasticsearch.indices.InvalidAliasNameException.class, exceptions.size());
exceptions.put(org.elasticsearch.indices.InvalidIndexNameException.class, exceptions.size());
exceptions.put(org.elasticsearch.indices.IndexPrimaryShardNotAllocatedException.class, exceptions.size());
exceptions.put(org.elasticsearch.transport.TransportException.class, exceptions.size());
exceptions.put(org.elasticsearch.ElasticsearchParseException.class, exceptions.size());
exceptions.put(org.elasticsearch.search.SearchException.class, exceptions.size());
exceptions.put(org.elasticsearch.index.mapper.MapperException.class, exceptions.size());
exceptions.put(org.elasticsearch.indices.InvalidTypeNameException.class, exceptions.size());
exceptions.put(org.elasticsearch.snapshots.SnapshotRestoreException.class, exceptions.size());
exceptions.put(org.elasticsearch.common.ParsingException.class, exceptions.size());
exceptions.put(org.elasticsearch.index.shard.IndexShardClosedException.class, exceptions.size());
exceptions.put(org.elasticsearch.indices.recovery.RecoverFilesRecoveryException.class, exceptions.size());
exceptions.put(org.elasticsearch.index.translog.TruncatedTranslogException.class, exceptions.size());
exceptions.put(org.elasticsearch.indices.recovery.RecoveryFailedException.class, exceptions.size());
exceptions.put(org.elasticsearch.index.shard.IndexShardRelocatedException.class, exceptions.size());
exceptions.put(org.elasticsearch.transport.NodeShouldNotConnectException.class, exceptions.size());
exceptions.put(org.elasticsearch.indices.IndexTemplateAlreadyExistsException.class, exceptions.size());
exceptions.put(org.elasticsearch.index.translog.TranslogCorruptedException.class, exceptions.size());
exceptions.put(org.elasticsearch.cluster.block.ClusterBlockException.class, exceptions.size());
exceptions.put(org.elasticsearch.search.fetch.FetchPhaseExecutionException.class, exceptions.size());
exceptions.put(org.elasticsearch.index.IndexShardAlreadyExistsException.class, exceptions.size());
exceptions.put(org.elasticsearch.index.engine.VersionConflictEngineException.class, exceptions.size());
exceptions.put(org.elasticsearch.index.engine.EngineException.class, exceptions.size());
exceptions.put(org.elasticsearch.index.engine.DocumentAlreadyExistsException.class, exceptions.size());
exceptions.put(org.elasticsearch.action.NoSuchNodeException.class, exceptions.size());
exceptions.put(org.elasticsearch.common.settings.SettingsException.class, exceptions.size());
exceptions.put(org.elasticsearch.indices.IndexTemplateMissingException.class, exceptions.size());
exceptions.put(org.elasticsearch.transport.SendRequestTransportException.class, exceptions.size());
exceptions.put(org.elasticsearch.common.util.concurrent.EsRejectedExecutionException.class, exceptions.size());
exceptions.put(org.elasticsearch.common.lucene.Lucene.EarlyTerminationException.class, exceptions.size());
exceptions.put(org.elasticsearch.cluster.routing.RoutingValidationException.class, exceptions.size());
exceptions.put(org.elasticsearch.common.io.stream.NotSerializableExceptionWrapper.class, exceptions.size());
exceptions.put(org.elasticsearch.indices.AliasFilterParsingException.class, exceptions.size());
exceptions.put(org.elasticsearch.index.engine.DeleteByQueryFailedEngineException.class, exceptions.size());
exceptions.put(org.elasticsearch.gateway.GatewayException.class, exceptions.size());
exceptions.put(org.elasticsearch.index.shard.IndexShardNotRecoveringException.class, exceptions.size());
exceptions.put(org.elasticsearch.http.HttpException.class, exceptions.size());
exceptions.put(org.elasticsearch.ElasticsearchException.class, exceptions.size());
exceptions.put(org.elasticsearch.snapshots.SnapshotMissingException.class, exceptions.size());
exceptions.put(org.elasticsearch.action.PrimaryMissingActionException.class, exceptions.size());
exceptions.put(org.elasticsearch.action.FailedNodeException.class, exceptions.size());
exceptions.put(org.elasticsearch.search.SearchParseException.class, exceptions.size());
exceptions.put(org.elasticsearch.snapshots.ConcurrentSnapshotExecutionException.class, exceptions.size());
exceptions.put(org.elasticsearch.common.blobstore.BlobStoreException.class, exceptions.size());
exceptions.put(org.elasticsearch.cluster.IncompatibleClusterStateVersionException.class, exceptions.size());
exceptions.put(org.elasticsearch.index.engine.RecoveryEngineException.class, exceptions.size());
exceptions.put(org.elasticsearch.common.util.concurrent.UncategorizedExecutionException.class, exceptions.size());
exceptions.put(org.elasticsearch.action.TimestampParsingException.class, exceptions.size());
exceptions.put(org.elasticsearch.action.RoutingMissingException.class, exceptions.size());
exceptions.put(org.elasticsearch.index.engine.IndexFailedEngineException.class, exceptions.size());
exceptions.put(org.elasticsearch.index.snapshots.IndexShardRestoreFailedException.class, exceptions.size());
exceptions.put(org.elasticsearch.repositories.RepositoryException.class, exceptions.size());
exceptions.put(org.elasticsearch.transport.ReceiveTimeoutTransportException.class, exceptions.size());
exceptions.put(org.elasticsearch.transport.NodeDisconnectedException.class, exceptions.size());
exceptions.put(org.elasticsearch.index.AlreadyExpiredException.class, exceptions.size());
exceptions.put(org.elasticsearch.search.aggregations.AggregationExecutionException.class, exceptions.size());
exceptions.put(org.elasticsearch.index.mapper.MergeMappingException.class, exceptions.size());
exceptions.put(org.elasticsearch.indices.InvalidIndexTemplateException.class, exceptions.size());
exceptions.put(org.elasticsearch.percolator.PercolateException.class, exceptions.size());
exceptions.put(org.elasticsearch.index.engine.RefreshFailedEngineException.class, exceptions.size());
exceptions.put(org.elasticsearch.search.aggregations.AggregationInitializationException.class, exceptions.size());
exceptions.put(org.elasticsearch.indices.recovery.DelayRecoveryException.class, exceptions.size());
exceptions.put(org.elasticsearch.search.warmer.IndexWarmerMissingException.class, exceptions.size());
exceptions.put(org.elasticsearch.client.transport.NoNodeAvailableException.class, exceptions.size());
exceptions.put(org.elasticsearch.script.groovy.GroovyScriptCompilationException.class, exceptions.size());
exceptions.put(org.elasticsearch.snapshots.InvalidSnapshotNameException.class, exceptions.size());
exceptions.put(org.elasticsearch.index.shard.IllegalIndexShardStateException.class, exceptions.size());
exceptions.put(org.elasticsearch.index.snapshots.IndexShardSnapshotException.class, exceptions.size());
exceptions.put(org.elasticsearch.index.shard.IndexShardNotStartedException.class, exceptions.size());
exceptions.put(org.elasticsearch.action.search.SearchPhaseExecutionException.class, exceptions.size());
exceptions.put(org.elasticsearch.transport.ActionNotFoundTransportException.class, exceptions.size());
exceptions.put(org.elasticsearch.transport.TransportSerializationException.class, exceptions.size());
exceptions.put(org.elasticsearch.transport.RemoteTransportException.class, exceptions.size());
exceptions.put(org.elasticsearch.index.engine.EngineCreationFailureException.class, exceptions.size());
exceptions.put(org.elasticsearch.cluster.routing.RoutingException.class, exceptions.size());
exceptions.put(org.elasticsearch.index.shard.IndexShardRecoveryException.class, exceptions.size());
exceptions.put(org.elasticsearch.repositories.RepositoryMissingException.class, exceptions.size());
exceptions.put(org.elasticsearch.index.percolator.PercolatorException.class, exceptions.size());
exceptions.put(org.elasticsearch.index.engine.DocumentSourceMissingException.class, exceptions.size());
exceptions.put(org.elasticsearch.index.engine.FlushNotAllowedEngineException.class, exceptions.size());
exceptions.put(org.elasticsearch.common.settings.NoClassSettingsException.class, exceptions.size());
exceptions.put(org.elasticsearch.transport.BindTransportException.class, exceptions.size());
exceptions.put(org.elasticsearch.rest.action.admin.indices.alias.delete.AliasesNotFoundException.class, exceptions.size());
exceptions.put(org.elasticsearch.index.shard.IndexShardRecoveringException.class, exceptions.size());
exceptions.put(org.elasticsearch.index.translog.TranslogException.class, exceptions.size());
exceptions.put(org.elasticsearch.cluster.metadata.ProcessClusterEventTimeoutException.class, exceptions.size());
exceptions.put(org.elasticsearch.action.support.replication.TransportReplicationAction.RetryOnPrimaryException.class, exceptions.size());
exceptions.put(org.elasticsearch.ElasticsearchTimeoutException.class, exceptions.size());
exceptions.put(org.elasticsearch.search.query.QueryPhaseExecutionException.class, exceptions.size());
exceptions.put(org.elasticsearch.repositories.RepositoryVerificationException.class, exceptions.size());
exceptions.put(org.elasticsearch.search.aggregations.InvalidAggregationPathException.class, exceptions.size());
exceptions.put(org.elasticsearch.script.groovy.GroovyScriptExecutionException.class, exceptions.size());
exceptions.put(org.elasticsearch.indices.IndexAlreadyExistsException.class, exceptions.size());
exceptions.put(org.elasticsearch.script.Script.ScriptParseException.class, exceptions.size());
exceptions.put(org.elasticsearch.transport.netty.SizeHeaderFrameDecoder.HttpOnTransportException.class, exceptions.size());
exceptions.put(org.elasticsearch.index.mapper.MapperParsingException.class, exceptions.size());
exceptions.put(org.elasticsearch.search.SearchContextException.class, exceptions.size());
exceptions.put(org.elasticsearch.search.builder.SearchSourceBuilderException.class, exceptions.size());
exceptions.put(org.elasticsearch.index.engine.EngineClosedException.class, exceptions.size());
exceptions.put(org.elasticsearch.action.NoShardAvailableActionException.class, exceptions.size());
exceptions.put(org.elasticsearch.action.UnavailableShardsException.class, exceptions.size());
exceptions.put(org.elasticsearch.index.engine.FlushFailedEngineException.class, exceptions.size());
exceptions.put(org.elasticsearch.common.breaker.CircuitBreakingException.class, exceptions.size());
exceptions.put(org.elasticsearch.transport.NodeNotConnectedException.class, exceptions.size());
exceptions.put(org.elasticsearch.index.mapper.StrictDynamicMappingException.class, exceptions.size());
exceptions.put(org.elasticsearch.action.support.replication.TransportReplicationAction.RetryOnReplicaException.class, exceptions.size());
exceptions.put(org.elasticsearch.indices.TypeMissingException.class, exceptions.size());
// added in 3.x
exceptions.put(org.elasticsearch.discovery.Discovery.FailedToCommitClusterStateException.class, 140);
final int maxOrd = 140;
assert exceptions.size() == maxOrd + 1;
Constructor<? extends ElasticsearchException>[] idToSupplier = new Constructor[maxOrd + 1];
exceptions.put(org.elasticsearch.discovery.Discovery.FailedToCommitClusterStateException.class, exceptions.size());
exceptions.put(org.elasticsearch.index.query.QueryShardException.class, exceptions.size());
// NOTE: ONLY APPEND TO THE END and NEVER REMOVE EXCEPTIONS IN MINOR VERSIONS
Constructor<? extends ElasticsearchException>[] idToSupplier = new Constructor[exceptions.size()];
for (Map.Entry<Class<? extends ElasticsearchException>, Integer> e : exceptions.entrySet()) {
try {
Constructor<? extends ElasticsearchException> constructor = e.getKey().getDeclaredConstructor(StreamInput.class);
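Replacing the hard-coded ordinals with exceptions.size() stays wire-compatible only because registration is append-only: each class's id is the map size at the moment it is registered, so earlier ids never move. A self-contained sketch of that invariant (the exception classes below are stand-ins):

import java.util.HashMap;
import java.util.Map;

class OrdinalRegistryExample {
    public static void main(String[] args) {
        Map<Class<?>, Integer> registry = new HashMap<>();
        // Ids come out as 0, 1, 2, ... in registration order.
        registry.put(IllegalStateException.class, registry.size());    // id 0
        registry.put(IllegalArgumentException.class, registry.size()); // id 1
        // Appending keeps earlier ids stable; inserting or removing in the
        // middle would renumber everything after it and break the wire format.
        registry.put(RuntimeException.class, registry.size());         // id 2
        System.out.println(registry.get(IllegalStateException.class)); // still 0
    }
}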


@@ -73,7 +73,8 @@ public class TransportCreateIndexAction extends TransportMasterNodeAction<Create
cause = "api";
}
final CreateIndexClusterStateUpdateRequest updateRequest = new CreateIndexClusterStateUpdateRequest(request, cause, request.index(), request.updateAllTypes())
final String indexName = indexNameExpressionResolver.resolveDateMathExpression(request.index());
final CreateIndexClusterStateUpdateRequest updateRequest = new CreateIndexClusterStateUpdateRequest(request, cause, indexName, request.updateAllTypes())
.ackTimeout(request.timeout()).masterNodeTimeout(request.masterNodeTimeout())
.settings(request.settings()).mappings(request.mappings())
.aliases(request.aliases()).customs(request.customs());
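For readers unfamiliar with date-math index names: the resolver turns an expression such as <logs-{now/d}> into a concrete name before the cluster-state update request is built. A rough standalone illustration of the substitution (the date pattern is an assumption, not taken from this commit):

import java.time.LocalDate;
import java.time.format.DateTimeFormatter;

class DateMathExample {
    public static void main(String[] args) {
        // Roughly what resolving "<logs-{now/d}>" produces: {now/d} is
        // replaced with the current date rounded down to the day.
        String resolved = "logs-" + LocalDate.now().format(DateTimeFormatter.ofPattern("yyyy.MM.dd"));
        System.out.println(resolved); // e.g. logs-2015.09.25
    }
}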


@@ -42,6 +42,7 @@ import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.query.IndexQueryParserService;
import org.elasticsearch.index.query.QueryShardException;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.indices.IndicesService;
@@ -188,8 +189,8 @@ public class TransportValidateQueryAction extends TransportBroadcastAction<Valid
}
if (request.rewrite()) {
explanation = getRewrittenQuery(searcher.searcher(), searchContext.query());
}
} catch (ParsingException e) {
}
} catch (QueryShardException|ParsingException e) {
valid = false;
error = e.getDetailedMessage();
} catch (AssertionError|IOException e) {


@@ -19,7 +19,6 @@
package org.elasticsearch.action.admin.indices.validate.query;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.QuerySourceBuilder;
import org.elasticsearch.action.support.broadcast.BroadcastOperationRequestBuilder;
import org.elasticsearch.client.ElasticsearchClient;


@@ -41,7 +41,7 @@ import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.script.ScriptService;
@@ -166,10 +166,10 @@ public class TransportExistsAction extends TransportBroadcastAction<ExistsReques
BytesReference source = request.querySource();
if (source != null && source.length() > 0) {
try {
QueryParseContext.setTypes(request.types());
QueryShardContext.setTypes(request.types());
context.parsedQuery(indexService.queryParserService().parseQuery(source));
} finally {
QueryParseContext.removeTypes();
QueryShardContext.removeTypes();
}
}
context.preProcess();
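The setTypes/removeTypes pair keeps its thread-local discipline while moving from QueryParseContext to QueryShardContext. A hedged sketch of the pattern (the type name and the callback are illustrative):

import org.elasticsearch.index.query.QueryShardContext;

class TypesScopeExample {
    // Hedged sketch: the types live in a static thread-local, so they must be
    // cleared in finally or they leak into the next request on this thread.
    static void parseWithTypes(Runnable parse) {
        QueryShardContext.setTypes(new String[] { "my_type" }); // illustrative type
        try {
            parse.run();
        } finally {
            QueryShardContext.removeTypes();
        }
    }
}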


@@ -715,8 +715,13 @@ public class SearchRequestBuilder extends ActionRequestBuilder<SearchRequest, Se
return this;
}
public SearchRequestBuilder addInnerHit(String name, InnerHitsBuilder.InnerHit innerHit) {
innerHitsBuilder().addInnerHit(name, innerHit);
public SearchRequestBuilder addParentChildInnerHits(String name, String type, InnerHitsBuilder.InnerHit innerHit) {
innerHitsBuilder().addParentChildInnerHits(name, type, innerHit);
return this;
}
public SearchRequestBuilder addNestedInnerHits(String name, String path, InnerHitsBuilder.InnerHit innerHit) {
innerHitsBuilder().addNestedInnerHits(name, path, innerHit);
return this;
}
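A hedged usage sketch of the split builder calls (the hit names, child type, nested path, and the InnerHitsBuilder import path are assumptions based on the 2.x layout):

import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.search.fetch.innerhits.InnerHitsBuilder;

class InnerHitsExample {
    // Hedged sketch: the single addInnerHit(name, innerHit) splits into an
    // explicit parent/child variant (keyed by child type) and a nested
    // variant (keyed by nested object path).
    static void addHits(SearchRequestBuilder builder, InnerHitsBuilder.InnerHit innerHit) {
        builder.addParentChildInnerHits("comments", "comment", innerHit);
        builder.addNestedInnerHits("authors", "book.authors", innerHit);
    }
}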


@@ -20,7 +20,6 @@
package org.elasticsearch.action.termvectors;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.TransportActions;
import org.elasticsearch.action.support.single.shard.TransportSingleShardAction;
@@ -81,7 +80,7 @@ public class TransportShardMultiTermsVectorAction extends TransportSingleShardAc
try {
IndexService indexService = indicesService.indexServiceSafe(request.index());
IndexShard indexShard = indexService.shardSafe(shardId.id());
TermVectorsResponse termVectorsResponse = indexShard.termVectorsService().getTermVectors(termVectorsRequest, shardId.getIndex());
TermVectorsResponse termVectorsResponse = indexShard.getTermVectors(termVectorsRequest);
termVectorsResponse.updateTookInMillis(termVectorsRequest.startTime());
response.add(request.locations.get(i), termVectorsResponse);
} catch (Throwable t) {


@@ -83,7 +83,7 @@ public class TransportTermVectorsAction extends TransportSingleShardAction<TermV
protected TermVectorsResponse shardOperation(TermVectorsRequest request, ShardId shardId) {
IndexService indexService = indicesService.indexServiceSafe(shardId.getIndex());
IndexShard indexShard = indexService.shardSafe(shardId.id());
TermVectorsResponse response = indexShard.termVectorsService().getTermVectors(request, shardId.getIndex());
TermVectorsResponse response = indexShard.getTermVectors(request);
response.updateTookInMillis(request.startTime());
return response;
}

View File

@ -79,7 +79,7 @@ final class Bootstrap {
}
/** initialize native resources */
public static void initializeNatives(boolean mlockAll, boolean ctrlHandler) {
public static void initializeNatives(boolean mlockAll, boolean seccomp, boolean ctrlHandler) {
final ESLogger logger = Loggers.getLogger(Bootstrap.class);
// check if the user is running as root, and bail
@ -91,6 +91,11 @@ final class Bootstrap {
}
}
// enable secure computing mode
if (seccomp) {
Natives.trySeccomp();
}
// mlockall if requested
if (mlockAll) {
if (Constants.WINDOWS) {
@ -134,7 +139,8 @@ final class Bootstrap {
private void setup(boolean addShutdownHook, Settings settings, Environment environment) throws Exception {
initializeNatives(settings.getAsBoolean("bootstrap.mlockall", false),
settings.getAsBoolean("bootstrap.ctrlhandler", true));
settings.getAsBoolean("bootstrap.seccomp", true),
settings.getAsBoolean("bootstrap.ctrlhandler", true));
// initialize probes before the security manager is installed
initializeProbes();
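For reference, the filter is on by default; a hedged sketch of opting out through the same settings that setup() reads above (not part of the commit, assuming the Settings builder API used elsewhere in the code base):
Settings settings = Settings.settingsBuilder()
        .put("bootstrap.seccomp", false)     // disable the new seccomp filter
        .put("bootstrap.mlockall", true)     // existing toggle, unchanged
        .build();
assert settings.getAsBoolean("bootstrap.seccomp", true) == false;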

View File

@ -43,4 +43,11 @@ public final class BootstrapInfo {
public static boolean isMemoryLocked() {
return Natives.isMemoryLocked();
}
/**
* Returns true if secure computing mode is enabled (linux/amd64 only)
*/
public static boolean isSeccompInstalled() {
return Natives.isSeccompInstalled();
}
}

View File

@ -41,6 +41,8 @@ class JNANatives {
// Set to true, in case native mlockall call was successful
static boolean LOCAL_MLOCKALL = false;
// Set to true, in case native seccomp call was successful
static boolean LOCAL_SECCOMP = false;
static void tryMlockall() {
int errno = Integer.MIN_VALUE;
@ -170,4 +172,19 @@ class JNANatives {
}
}
static void trySeccomp() {
if (Constants.LINUX && "amd64".equals(Constants.OS_ARCH)) {
try {
Seccomp.installFilter();
LOCAL_SECCOMP = true;
} catch (Exception e) {
// this is likely to happen unless the kernel is newish; it's a best effort at the moment
// so we log stacktrace at debug for now...
if (logger.isDebugEnabled()) {
logger.debug("unable to install seccomp filter", e);
}
logger.warn("unable to install seccomp filter: " + e.getMessage());
}
}
}
}

View File

@ -88,4 +88,19 @@ final class Natives {
}
return JNANatives.LOCAL_MLOCKALL;
}
static void trySeccomp() {
if (!JNA_AVAILABLE) {
logger.warn("cannot install seccomp filters because JNA is not available");
return;
}
JNANatives.trySeccomp();
}
static boolean isSeccompInstalled() {
if (!JNA_AVAILABLE) {
return false;
}
return JNANatives.LOCAL_SECCOMP;
}
}

View File

@ -0,0 +1,271 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.bootstrap;
import com.sun.jna.Library;
import com.sun.jna.Memory;
import com.sun.jna.Native;
import com.sun.jna.Pointer;
import com.sun.jna.Structure;
import org.apache.lucene.util.Constants;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.Arrays;
import java.util.List;
/**
* Installs a limited form of Linux secure computing mode (filter mode).
* This filters system calls to block process execution.
* <p>
* This is only supported on the amd64 architecture, on Linux kernels 3.5 or above, and requires
* {@code CONFIG_SECCOMP} and {@code CONFIG_SECCOMP_FILTER} compiled into the kernel.
* <p>
* Filters are installed using either {@code seccomp(2)} (3.17+) or {@code prctl(2)} (3.5+). {@code seccomp(2)}
* is preferred, as it allows filters to be applied to any existing threads in the process, and one motivation
* here is to protect against bugs in the JVM. Otherwise, code will fall back to the {@code prctl(2)} method
* which will at least protect elasticsearch application threads.
* <p>
* The filters will return {@code EACCES} (Access Denied) for the following system calls:
* <ul>
* <li>{@code execve}</li>
* <li>{@code fork}</li>
* <li>{@code vfork}</li>
* </ul>
* <p>
* This is not intended as a sandbox. It is another level of security, mostly intended to annoy
* security researchers and make their lives more difficult in achieving "remote execution" exploits.
* @see <a href="http://www.kernel.org/doc/Documentation/prctl/seccomp_filter.txt">
* http://www.kernel.org/doc/Documentation/prctl/seccomp_filter.txt</a>
*/
// only supported on linux/amd64
// not an example of how to write code!!!
final class Seccomp {
private static final ESLogger logger = Loggers.getLogger(Seccomp.class);
/** we use an explicit interface for native methods, for varargs support */
static interface LinuxLibrary extends Library {
/**
* maps to prctl(2)
*/
int prctl(int option, long arg2, long arg3, long arg4, long arg5);
/**
* used to call seccomp(2); it's too new...
* this is the only way, DON'T use it on some other architecture unless you know what you are doing
*/
long syscall(long number, Object... args);
};
// null if something goes wrong.
static final LinuxLibrary libc;
static {
LinuxLibrary lib = null;
try {
lib = (LinuxLibrary) Native.loadLibrary("c", LinuxLibrary.class);
} catch (UnsatisfiedLinkError e) {
logger.warn("unable to link C library. native methods (seccomp) will be disabled.", e);
}
libc = lib;
}
/** the preferred method is seccomp(2), since we can apply to all threads of the process */
static final int SECCOMP_SYSCALL_NR = 317; // since Linux 3.17
static final int SECCOMP_SET_MODE_FILTER = 1; // since Linux 3.17
static final int SECCOMP_FILTER_FLAG_TSYNC = 1; // since Linux 3.17
/** otherwise, we can use prctl(2), which will at least protect ES application threads */
static final int PR_GET_NO_NEW_PRIVS = 39; // since Linux 3.5
static final int PR_SET_NO_NEW_PRIVS = 38; // since Linux 3.5
static final int PR_GET_SECCOMP = 21; // since Linux 2.6.23
static final int PR_SET_SECCOMP = 22; // since Linux 2.6.23
static final int SECCOMP_MODE_FILTER = 2; // since Linux 3.5
/** corresponds to struct sock_filter */
static final class SockFilter {
short code; // insn
byte jt; // number of insn to jump (skip) if true
byte jf; // number of insn to jump (skip) if false
int k; // additional data
SockFilter(short code, byte jt, byte jf, int k) {
this.code = code;
this.jt = jt;
this.jf = jf;
this.k = k;
}
}
/** corresponds to struct sock_fprog */
public static final class SockFProg extends Structure implements Structure.ByReference {
public short len; // number of filters
public Pointer filter; // filters
public SockFProg(SockFilter filters[]) {
len = (short) filters.length;
// serialize struct sock_filter * explicitly, it's less confusing than the JNA magic we would need
Memory filter = new Memory(len * 8);
ByteBuffer bbuf = filter.getByteBuffer(0, len * 8);
bbuf.order(ByteOrder.nativeOrder()); // little endian
for (SockFilter f : filters) {
bbuf.putShort(f.code);
bbuf.put(f.jt);
bbuf.put(f.jf);
bbuf.putInt(f.k);
}
this.filter = filter;
}
@Override
protected List<String> getFieldOrder() {
return Arrays.asList(new String[] { "len", "filter" });
}
}
// BPF "macros" and constants
static final int BPF_LD = 0x00;
static final int BPF_W = 0x00;
static final int BPF_ABS = 0x20;
static final int BPF_JMP = 0x05;
static final int BPF_JEQ = 0x10;
static final int BPF_JGE = 0x30;
static final int BPF_JGT = 0x20;
static final int BPF_RET = 0x06;
static final int BPF_K = 0x00;
static SockFilter BPF_STMT(int code, int k) {
return new SockFilter((short) code, (byte) 0, (byte) 0, k);
}
static SockFilter BPF_JUMP(int code, int k, int jt, int jf) {
return new SockFilter((short) code, (byte) jt, (byte) jf, k);
}
static final int AUDIT_ARCH_X86_64 = 0xC000003E;
static final int SECCOMP_RET_ERRNO = 0x00050000;
static final int SECCOMP_RET_DATA = 0x0000FFFF;
static final int SECCOMP_RET_ALLOW = 0x7FFF0000;
// some errno constants for error checking/handling
static final int EACCES = 0x0D;
static final int EFAULT = 0x0E;
static final int EINVAL = 0x16;
static final int ENOSYS = 0x26;
// offsets (arch dependent) that our BPF checks
static final int SECCOMP_DATA_NR_OFFSET = 0x00;
static final int SECCOMP_DATA_ARCH_OFFSET = 0x04;
// currently this range is blocked (inclusive):
// execve is really the only one needed but why let someone fork a 30G heap? (not really what happens)
// ...
// 57: fork
// 58: vfork
// 59: execve
// ...
static final int BLACKLIST_START = 57;
static final int BLACKLIST_END = 59;
// TODO: execveat()? it's less of a risk since the jvm does not use it...
/** try to install our filters */
static void installFilter() {
// first be defensive: we can give nice errors this way, at the very least.
// also, some of these security features get backported to old versions, checking kernel version here is a big no-no!
boolean supported = Constants.LINUX && "amd64".equals(Constants.OS_ARCH);
if (supported == false) {
throw new IllegalStateException("bug: should not be trying to initialize seccomp for an unsupported architecture");
}
// we couldn't link methods, could be some really ancient kernel (e.g. < 2.1.57) or some bug
if (libc == null) {
throw new UnsupportedOperationException("seccomp unavailable: could not link methods. requires kernel 3.5+ with CONFIG_SECCOMP and CONFIG_SECCOMP_FILTER compiled in");
}
// check for kernel version
if (libc.prctl(PR_GET_NO_NEW_PRIVS, 0, 0, 0, 0) < 0) {
int errno = Native.getLastError();
switch (errno) {
case ENOSYS: throw new UnsupportedOperationException("seccomp unavailable: requires kernel 3.5+ with CONFIG_SECCOMP and CONFIG_SECCOMP_FILTER compiled in");
default: throw new UnsupportedOperationException("prctl(PR_GET_NO_NEW_PRIVS): " + JNACLibrary.strerror(errno));
}
}
// check for SECCOMP
if (libc.prctl(PR_GET_SECCOMP, 0, 0, 0, 0) < 0) {
int errno = Native.getLastError();
switch (errno) {
case EINVAL: throw new UnsupportedOperationException("seccomp unavailable: CONFIG_SECCOMP not compiled into kernel, CONFIG_SECCOMP and CONFIG_SECCOMP_FILTER are needed");
default: throw new UnsupportedOperationException("prctl(PR_GET_SECCOMP): " + JNACLibrary.strerror(errno));
}
}
// check for SECCOMP_MODE_FILTER
if (libc.prctl(PR_SET_SECCOMP, SECCOMP_MODE_FILTER, 0, 0, 0) < 0) {
int errno = Native.getLastError();
switch (errno) {
case EFAULT: break; // available
case EINVAL: throw new UnsupportedOperationException("seccomp unavailable: CONFIG_SECCOMP_FILTER not compiled into kernel, CONFIG_SECCOMP and CONFIG_SECCOMP_FILTER are needed");
default: throw new UnsupportedOperationException("prctl(PR_SET_SECCOMP): " + JNACLibrary.strerror(errno));
}
}
// ok, now set PR_SET_NO_NEW_PRIVS, needed to be able to set a seccomp filter as ordinary user
if (libc.prctl(PR_SET_NO_NEW_PRIVS, 1, 0, 0, 0) < 0) {
throw new UnsupportedOperationException("prctl(PR_SET_NO_NEW_PRIVS): " + JNACLibrary.strerror(Native.getLastError()));
}
// BPF installed to check arch, then syscall range. See https://www.kernel.org/doc/Documentation/prctl/seccomp_filter.txt for details.
SockFilter insns[] = {
/* 1 */ BPF_STMT(BPF_LD + BPF_W + BPF_ABS, SECCOMP_DATA_ARCH_OFFSET), // if (arch != amd64) goto fail;
/* 2 */ BPF_JUMP(BPF_JMP + BPF_JEQ + BPF_K, AUDIT_ARCH_X86_64, 0, 3), //
/* 3 */ BPF_STMT(BPF_LD + BPF_W + BPF_ABS, SECCOMP_DATA_NR_OFFSET), // if (syscall < BLACKLIST_START) goto pass;
/* 4 */ BPF_JUMP(BPF_JMP + BPF_JGE + BPF_K, BLACKLIST_START, 0, 2), //
/* 5 */ BPF_JUMP(BPF_JMP + BPF_JGT + BPF_K, BLACKLIST_END, 1, 0), // if (syscall > BLACKLIST_END) goto pass;
/* 6 */ BPF_STMT(BPF_RET + BPF_K, SECCOMP_RET_ERRNO | (EACCES & SECCOMP_RET_DATA)), // fail: return EACCES;
/* 7 */ BPF_STMT(BPF_RET + BPF_K, SECCOMP_RET_ALLOW) // pass: return OK;
};
// seccomp takes a long, so we pass it one explicitly to keep the JNA simple
SockFProg prog = new SockFProg(insns);
prog.write();
long pointer = Pointer.nativeValue(prog.getPointer());
// install filter, if this works, after this there is no going back!
// first try it with seccomp(SECCOMP_SET_MODE_FILTER), falling back to prctl()
if (libc.syscall(SECCOMP_SYSCALL_NR, SECCOMP_SET_MODE_FILTER, SECCOMP_FILTER_FLAG_TSYNC, pointer) != 0) {
int errno1 = Native.getLastError();
if (logger.isDebugEnabled()) {
logger.debug("seccomp(SECCOMP_SET_MODE_FILTER): " + JNACLibrary.strerror(errno1) + ", falling back to prctl(PR_SET_SECCOMP)...");
}
if (libc.prctl(PR_SET_SECCOMP, SECCOMP_MODE_FILTER, pointer, 0, 0) < 0) {
int errno2 = Native.getLastError();
throw new UnsupportedOperationException("seccomp(SECCOMP_SET_MODE_FILTER): " + JNACLibrary.strerror(errno1) +
", prctl(PR_SET_SECCOMP): " + JNACLibrary.strerror(errno2));
}
}
// now check that the filter was really installed, we should be in filter mode.
if (libc.prctl(PR_GET_SECCOMP, 0, 0, 0, 0) != 2) {
throw new UnsupportedOperationException("seccomp filter installation did not really succeed. seccomp(PR_GET_SECCOMP): " + JNACLibrary.strerror(Native.getLastError()));
}
}
}
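To make the seven-instruction BPF program above easier to follow, here is a plain-Java simulation of its decision logic (an illustration only, not part of the commit; it mirrors the jump targets of instructions 2, 4 and 5 and the two return instructions):
// Simulates the filter: block fork/vfork/execve on amd64, allow everything else.
final class SeccompFilterSimulation {
    static final int AUDIT_ARCH_X86_64 = 0xC000003E;
    static final int BLACKLIST_START = 57; // fork
    static final int BLACKLIST_END = 59;   // execve
    static String evaluate(int arch, int syscallNr) {
        if (arch != AUDIT_ARCH_X86_64) {
            return "EACCES"; // insn 2: jf=3 jumps to insn 6, the fail return
        }
        if (syscallNr < BLACKLIST_START) {
            return "ALLOW";  // insn 4: jf=2 jumps to insn 7, the pass return
        }
        if (syscallNr > BLACKLIST_END) {
            return "ALLOW";  // insn 5: jt=1 jumps to insn 7, the pass return
        }
        return "EACCES";     // insn 6: syscall falls inside [57, 59]
    }
    public static void main(String[] args) {
        System.out.println(evaluate(AUDIT_ARCH_X86_64, 59)); // execve -> EACCES
        System.out.println(evaluate(AUDIT_ARCH_X86_64, 0));  // read   -> ALLOW
    }
}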

View File

@ -28,7 +28,7 @@ import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.query.IndexQueryParserService;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.indices.InvalidAliasNameException;
import java.io.IOException;
@ -142,10 +142,10 @@ public class AliasValidator extends AbstractComponent {
}
private void validateAliasFilter(XContentParser parser, IndexQueryParserService indexQueryParserService) throws IOException {
QueryParseContext context = indexQueryParserService.getParseContext();
QueryShardContext context = indexQueryParserService.getShardContext();
try {
context.reset(parser);
context.parseInnerFilter();
context.parseContext().parseInnerQueryBuilder().toFilter(context);
} finally {
context.reset(null);
parser.close();

View File

@ -219,6 +219,15 @@ public class IndexNameExpressionResolver extends AbstractComponent {
return state.metaData().getAliasAndIndexLookup().containsKey(resolvedAliasOrIndex);
}
/**
* @return If the specified string is a date math expression then this method returns the resolved expression.
*/
public String resolveDateMathExpression(String dateExpression) {
// The date math expression resolver doesn't rely on cluster state or indices options, because
// it just resolves the date math to an actual date.
return dateMathExpressionResolver.resolveExpression(dateExpression, new Context(null, null));
}
/**
* Iterates through the list of indices and selects the effective list of filtering aliases for the
* given index.

View File

@ -171,4 +171,11 @@ public final class Numbers {
return longToBytes(Double.doubleToRawLongBits(val));
}
/** Returns true if value is neither NaN nor infinite. */
public static boolean isValidDouble(double value) {
if (Double.isNaN(value) || Double.isInfinite(value)) {
return false;
}
return true;
}
}

View File

@ -24,15 +24,14 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentLocation;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.rest.RestStatus;
import java.io.IOException;
/**
*
* Exception that can be used when parsing queries with a given {@link QueryParseContext}.
* Can contain information about the location of the error.
*/
public class ParsingException extends ElasticsearchException {
@ -40,25 +39,17 @@ public class ParsingException extends ElasticsearchException {
private final int lineNumber;
private final int columnNumber;
public ParsingException(QueryParseContext parseContext, String msg, Object... args) {
this(parseContext, msg, null, args);
public ParsingException(XContentLocation contentLocation, String msg, Object... args) {
this(contentLocation, msg, null, args);
}
public ParsingException(QueryParseContext parseContext, String msg, Throwable cause, Object... args) {
this(parseContext.index(), parseContext.parser(), msg, cause, args);
}
public ParsingException(Index index, XContentParser parser, String msg, Throwable cause, Object... args) {
public ParsingException(XContentLocation contentLocation, String msg, Throwable cause, Object... args) {
super(msg, cause, args);
setIndex(index);
int lineNumber = UNKNOWN_POSITION;
int columnNumber = UNKNOWN_POSITION;
if (parser != null) {
XContentLocation location = parser.getTokenLocation();
if (location != null) {
lineNumber = location.lineNumber;
columnNumber = location.columnNumber;
}
if (contentLocation != null) {
lineNumber = contentLocation.lineNumber;
columnNumber = contentLocation.columnNumber;
}
this.columnNumber = columnNumber;
this.lineNumber = lineNumber;
@ -68,16 +59,21 @@ public class ParsingException extends ElasticsearchException {
* This constructor is provided for use in unit tests where a
* {@link QueryParseContext} may not be available
*/
public ParsingException(Index index, int line, int col, String msg, Throwable cause) {
public ParsingException(int line, int col, String msg, Throwable cause) {
super(msg, cause);
setIndex(index);
this.lineNumber = line;
this.columnNumber = col;
}
public ParsingException(StreamInput in) throws IOException{
super(in);
lineNumber = in.readInt();
columnNumber = in.readInt();
}
/**
* Line number of the location of the error
*
* @return the line number or -1 if unknown
*/
public int getLineNumber() {
@ -86,7 +82,7 @@ public class ParsingException extends ElasticsearchException {
/**
* Column number of the location of the error
*
* @return the column number or -1 if unknown
*/
public int getColumnNumber() {
@ -113,11 +109,4 @@ public class ParsingException extends ElasticsearchException {
out.writeInt(lineNumber);
out.writeInt(columnNumber);
}
public ParsingException(StreamInput in) throws IOException{
super(in);
lineNumber = in.readInt();
columnNumber = in.readInt();
}
}
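Call sites now hand the exception the parser's token location directly; a hedged sketch of the new usage (not part of the commit; parser, token and currentFieldName are assumed to be in scope inside a query parser):
if (token == XContentParser.Token.VALUE_NULL) {
    throw new ParsingException(parser.getTokenLocation(), "unexpected null value for [{}]", currentFieldName);
}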

View File

@ -570,7 +570,6 @@ public class Strings {
count++;
}
}
// TODO (MvG): No push: hppc or jcf?
final Set<String> result = new HashSet<>(count);
final int len = chars.length;
int start = 0; // starting index in chars of the current substring.

View File

@ -21,6 +21,9 @@ package org.elasticsearch.common.geo;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.SloppyMath;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.index.fielddata.FieldData;
import org.elasticsearch.index.fielddata.GeoPointValues;
@ -28,17 +31,17 @@ import org.elasticsearch.index.fielddata.MultiGeoPointValues;
import org.elasticsearch.index.fielddata.NumericDoubleValues;
import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
import org.elasticsearch.index.fielddata.SortingNumericDoubleValues;
import java.io.IOException;
import java.util.Locale;
/**
* Geo distance calculation.
*/
public enum GeoDistance {
public enum GeoDistance implements Writeable<GeoDistance> {
/**
* Calculates distance as points on a plane. Faster, but less accurate than {@link #ARC}.
*/
PLANE() {
PLANE {
@Override
public double calculate(double sourceLatitude, double sourceLongitude, double targetLatitude, double targetLongitude, DistanceUnit unit) {
double px = targetLongitude - sourceLongitude;
@ -60,7 +63,7 @@ public enum GeoDistance {
/**
* Calculates distance factor.
*/
FACTOR() {
FACTOR {
@Override
public double calculate(double sourceLatitude, double sourceLongitude, double targetLatitude, double targetLongitude, DistanceUnit unit) {
double longitudeDifference = targetLongitude - sourceLongitude;
@ -82,7 +85,7 @@ public enum GeoDistance {
/**
* Calculates distance as points on a globe.
*/
ARC() {
ARC {
@Override
public double calculate(double sourceLatitude, double sourceLongitude, double targetLatitude, double targetLongitude, DistanceUnit unit) {
double x1 = sourceLatitude * Math.PI / 180D;
@ -109,7 +112,7 @@ public enum GeoDistance {
* Calculates distance as points on a globe in a sloppy way. Close to the poles the accuracy
* of this function decreases.
*/
SLOPPY_ARC() {
SLOPPY_ARC {
@Override
public double normalize(double distance, DistanceUnit unit) {
@ -127,12 +130,31 @@ public enum GeoDistance {
}
};
/** Returns a GeoDistance object as read from the StreamInput. */
@Override
public GeoDistance readFrom(StreamInput in) throws IOException {
int ord = in.readVInt();
if (ord < 0 || ord >= values().length) {
throw new IOException("Unknown GeoDistance ordinal [" + ord + "]");
}
return GeoDistance.values()[ord];
}
public static GeoDistance readGeoDistanceFrom(StreamInput in) throws IOException {
return DEFAULT.readFrom(in);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeVInt(this.ordinal());
}
/**
* Default {@link GeoDistance} function. This method should be used if no specific function has been selected.
* This is an alias for <code>SLOPPY_ARC</code>
*/
public static final GeoDistance DEFAULT = SLOPPY_ARC;
public abstract double normalize(double distance, DistanceUnit unit);
public abstract double calculate(double sourceLatitude, double sourceLongitude, double targetLatitude, double targetLongitude, DistanceUnit unit);
@ -180,14 +202,14 @@ public enum GeoDistance {
/**
* Get a {@link GeoDistance} according to a given name. Valid values are
*
* <ul>
* <li><b>plane</b> for <code>GeoDistance.PLANE</code></li>
* <li><b>sloppy_arc</b> for <code>GeoDistance.SLOPPY_ARC</code></li>
* <li><b>factor</b> for <code>GeoDistance.FACTOR</code></li>
* <li><b>arc</b> for <code>GeoDistance.ARC</code></li>
* </ul>
*
* @param name name of the {@link GeoDistance}
* @return a {@link GeoDistance}
*/
@ -336,7 +358,7 @@ public enum GeoDistance {
/**
* Basic implementation of {@link FixedSourceDistance}. This class keeps the basic parameters for distance
* functions based on a fixed source, namely latitude, longitude and unit.
*/
public static abstract class FixedSourceDistanceBase implements FixedSourceDistance {
protected final double sourceLatitude;
@ -349,7 +371,7 @@ public enum GeoDistance {
this.unit = unit;
}
}
public static class ArcFixedSourceDistance extends FixedSourceDistanceBase {
public ArcFixedSourceDistance(double sourceLatitude, double sourceLongitude, DistanceUnit unit) {

View File

@ -19,6 +19,11 @@
package org.elasticsearch.common.geo;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import java.io.IOException;
import org.apache.lucene.util.BitUtil;
import org.apache.lucene.util.XGeoHashUtils;
@ -27,11 +32,14 @@ import org.apache.lucene.util.XGeoUtils;
/**
*
*/
public final class GeoPoint {
public final class GeoPoint implements Writeable<GeoPoint> {
private double lat;
private double lon;
private final static double TOLERANCE = XGeoUtils.TOLERANCE;
// for serialization purposes
private static final GeoPoint PROTOTYPE = new GeoPoint(Double.NaN, Double.NaN);
public GeoPoint() {
}
@ -51,6 +59,10 @@ public final class GeoPoint {
this.lon = lon;
}
public GeoPoint(GeoPoint template) {
this(template.getLat(), template.getLon());
}
public GeoPoint reset(double lat, double lon) {
this.lat = lat;
this.lon = lon;
@ -152,8 +164,7 @@ public final class GeoPoint {
}
public static GeoPoint parseFromLatLon(String latLon) {
GeoPoint point = new GeoPoint();
point.resetFromString(latLon);
GeoPoint point = new GeoPoint(latLon);
return point;
}
@ -168,4 +179,21 @@ public final class GeoPoint {
public static GeoPoint fromIndexLong(long indexLong) {
return new GeoPoint().resetFromIndexHash(indexLong);
}
}
@Override
public GeoPoint readFrom(StreamInput in) throws IOException {
double lat = in.readDouble();
double lon = in.readDouble();
return new GeoPoint(lat, lon);
}
public static GeoPoint readGeoPointFrom(StreamInput in) throws IOException {
return PROTOTYPE.readFrom(in);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeDouble(lat);
out.writeDouble(lon);
}
}
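A round-trip sketch for the new Writeable implementation (illustration only, not part of the commit; BytesStreamOutput is assumed, as used elsewhere in the code base):
BytesStreamOutput out = new BytesStreamOutput();
new GeoPoint(41.12, -71.34).writeTo(out);
GeoPoint copy = GeoPoint.readGeoPointFrom(StreamInput.wrap(out.bytes()));
assert copy.getLat() == 41.12 && copy.getLon() == -71.34;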

View File

@ -23,6 +23,7 @@ import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree;
import org.apache.lucene.spatial.prefix.tree.QuadPrefixTree;
import org.apache.lucene.util.SloppyMath;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.Numbers;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParser.Token;
@ -34,10 +35,19 @@ import java.io.IOException;
*/
public class GeoUtils {
/** Maximum valid latitude in degrees. */
public static final double MAX_LAT = 90.0;
/** Minimum valid latitude in degrees. */
public static final double MIN_LAT = -90.0;
/** Maximum valid longitude in degrees. */
public static final double MAX_LON = 180.0;
/** Minimum valid longitude in degrees. */
public static final double MIN_LON = -180.0;
public static final String LATITUDE = GeoPointFieldMapper.Names.LAT;
public static final String LONGITUDE = GeoPointFieldMapper.Names.LON;
public static final String GEOHASH = GeoPointFieldMapper.Names.GEOHASH;
/** Earth ellipsoid major axis defined by WGS 84 in meters */
public static final double EARTH_SEMI_MAJOR_AXIS = 6378137.0; // meters (WGS 84)
@ -56,6 +66,22 @@ public class GeoUtils {
/** Earth ellipsoid polar distance in meters */
public static final double EARTH_POLAR_DISTANCE = Math.PI * EARTH_SEMI_MINOR_AXIS;
/** Returns true if latitude is actually a valid latitude value.*/
public static boolean isValidLatitude(double latitude) {
if (Double.isNaN(latitude) || Double.isInfinite(latitude) || latitude < GeoUtils.MIN_LAT || latitude > GeoUtils.MAX_LAT) {
return false;
}
return true;
}
/** Returns true if longitude is actually a valid longitude value. */
public static boolean isValidLongitude(double longitude) {
if (Double.isNaN(longitude) || Double.isInfinite(longitude) || longitude < GeoUtils.MIN_LON || longitude > GeoUtils.MAX_LON) {
return false;
}
return true;
}
/**
* Return an approximate value of the diameter of the earth (in meters) at the given latitude (in radians).
*/
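A quick illustration of the new bounds checks (not part of the commit):
assert GeoUtils.isValidLatitude(45.0);
assert GeoUtils.isValidLatitude(90.1) == false;       // beyond MAX_LAT
assert GeoUtils.isValidLongitude(-180.0);             // MIN_LON is inclusive
assert GeoUtils.isValidLongitude(Double.NaN) == false;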

View File

@ -19,13 +19,18 @@
package org.elasticsearch.common.geo;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import java.io.IOException;
import java.util.Locale;
/**
* Enum representing the relationship between a Query / Filter Shape and indexed Shapes
* that will be used to determine if a Document should be matched or not
*/
public enum ShapeRelation {
public enum ShapeRelation implements Writeable<ShapeRelation>{
INTERSECTS("intersects"),
DISJOINT("disjoint"),
@ -37,6 +42,20 @@ public enum ShapeRelation {
this.relationName = relationName;
}
@Override
public ShapeRelation readFrom(StreamInput in) throws IOException {
int ordinal = in.readVInt();
if (ordinal < 0 || ordinal >= values().length) {
throw new IOException("Unknown ShapeRelation ordinal [" + ordinal + "]");
}
return values()[ordinal];
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeVInt(ordinal());
}
public static ShapeRelation getRelationByName(String name) {
name = name.toLowerCase(Locale.ENGLISH);
for (ShapeRelation relation : ShapeRelation.values()) {

View File

@ -18,11 +18,16 @@
*/
package org.elasticsearch.common.geo;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import java.io.IOException;
/**
*
*/
public enum SpatialStrategy {
public enum SpatialStrategy implements Writeable<SpatialStrategy> {
TERM("term"),
RECURSIVE("recursive");
@ -36,4 +41,27 @@ public enum SpatialStrategy {
public String getStrategyName() {
return strategyName;
}
@Override
public SpatialStrategy readFrom(StreamInput in) throws IOException {
int ordinal = in.readVInt();
if (ordinal < 0 || ordinal >= values().length) {
throw new IOException("Unknown SpatialStrategy ordinal [" + ordinal + "]");
}
return values()[ordinal];
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeVInt(ordinal());
}
public static SpatialStrategy fromString(String strategyName) {
for (SpatialStrategy strategy : values()) {
if (strategy.strategyName.equals(strategyName)) {
return strategy;
}
}
return null;
}
}
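ShapeRelation and SpatialStrategy follow the same ordinal-on-the-wire pattern as GeoDistance above; a round-trip sketch (illustration only, assuming BytesStreamOutput):
BytesStreamOutput out = new BytesStreamOutput();
SpatialStrategy.RECURSIVE.writeTo(out);
SpatialStrategy strategy = SpatialStrategy.TERM.readFrom(StreamInput.wrap(out.bytes()));
assert strategy == SpatialStrategy.RECURSIVE;
assert SpatialStrategy.fromString("term") == SpatialStrategy.TERM;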

View File

@ -27,6 +27,7 @@ import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.Geometry;
import com.vividsolutions.jts.geom.GeometryFactory;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.action.support.ToXContentToBytes;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.ESLoggerFactory;
import org.elasticsearch.common.unit.DistanceUnit.Distance;
@ -34,7 +35,6 @@ import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.mapper.geo.GeoShapeFieldMapper;
import java.io.IOException;
@ -43,7 +43,7 @@ import java.util.*;
/**
* Basic class for building GeoJSON shapes like Polygons, Linestrings, etc
*/
public abstract class ShapeBuilder implements ToXContent {
public abstract class ShapeBuilder extends ToXContentToBytes {
protected static final ESLogger LOGGER = ESLoggerFactory.getLogger(ShapeBuilder.class.getName());
@ -209,16 +209,6 @@ public abstract class ShapeBuilder implements ToXContent {
*/
public static EnvelopeBuilder newEnvelope(Orientation orientation) { return new EnvelopeBuilder(orientation); }
@Override
public String toString() {
try {
XContentBuilder xcontent = JsonXContent.contentBuilder();
return toXContent(xcontent, EMPTY_PARAMS).prettyPrint().string();
} catch (IOException e) {
return super.toString();
}
}
/**
* Create a new Shape from this builder. Since calling this method could change the
* defined shape. (by inserting new coordinates or change the position of points)

View File

@ -68,4 +68,4 @@ public abstract class FilterStreamInput extends StreamInput {
public void setVersion(Version version) {
delegate.setVersion(version);
}
}

View File

@ -33,6 +33,8 @@ import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.text.StringAndBytesText;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
@ -55,8 +57,18 @@ import static org.elasticsearch.ElasticsearchException.readStackTrace;
public abstract class StreamInput extends InputStream {
private final NamedWriteableRegistry namedWriteableRegistry;
private Version version = Version.CURRENT;
protected StreamInput() {
this.namedWriteableRegistry = new NamedWriteableRegistry();
}
protected StreamInput(NamedWriteableRegistry namedWriteableRegistry) {
this.namedWriteableRegistry = namedWriteableRegistry;
}
public Version getVersion() {
return this.version;
}
@ -349,6 +361,13 @@ public abstract class StreamInput extends InputStream {
return ret;
}
public String[] readOptionalStringArray() throws IOException {
if (readBoolean()) {
return readStringArray();
}
return null;
}
@Nullable
@SuppressWarnings("unchecked")
public Map<String, Object> readMap() throws IOException {
@ -571,6 +590,20 @@ public abstract class StreamInput extends InputStream {
throw new UnsupportedOperationException();
}
/**
* Reads a {@link QueryBuilder} from the current stream
*/
public QueryBuilder readQuery() throws IOException {
return readNamedWriteable(QueryBuilder.class);
}
/**
* Reads a {@link org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder} from the current stream
*/
public ScoreFunctionBuilder<?> readScoreFunction() throws IOException {
return readNamedWriteable(ScoreFunctionBuilder.class);
}
public static StreamInput wrap(BytesReference reference) {
if (reference.hasArray() == false) {
reference = reference.toBytesArray();

View File

@ -31,6 +31,8 @@ import org.elasticsearch.Version;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder;
import org.joda.time.ReadableInstant;
import java.io.EOFException;
@ -315,6 +317,18 @@ public abstract class StreamOutput extends OutputStream {
}
}
/**
* Writes a string array; if the array is null, writes a false flag instead.
*/
public void writeOptionalStringArray(@Nullable String[] array) throws IOException {
if (array == null) {
writeBoolean(false);
} else {
writeBoolean(true);
writeStringArray(array);
}
}
public void writeMap(@Nullable Map<String, Object> map) throws IOException {
writeGenericValue(map);
}
@ -568,4 +582,18 @@ public abstract class StreamOutput extends OutputStream {
writeString(namedWriteable.getWriteableName());
namedWriteable.writeTo(this);
}
/**
* Writes a {@link QueryBuilder} to the current stream
*/
public void writeQuery(QueryBuilder queryBuilder) throws IOException {
writeNamedWriteable(queryBuilder);
}
/**
* Writes a {@link ScoreFunctionBuilder} to the current stream
*/
public void writeScoreFunction(ScoreFunctionBuilder<?> scoreFunctionBuilder) throws IOException {
writeNamedWriteable(scoreFunctionBuilder);
}
}
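A round-trip sketch for the new optional-array helpers (illustration only, assuming BytesStreamOutput; writeQuery/readQuery work the same way via the NamedWriteableRegistry):
BytesStreamOutput out = new BytesStreamOutput();
out.writeOptionalStringArray(null);
out.writeOptionalStringArray(new String[] { "a", "b" });
StreamInput in = StreamInput.wrap(out.bytes());
assert in.readOptionalStringArray() == null;
assert Arrays.equals(in.readOptionalStringArray(), new String[] { "a", "b" });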

View File

@ -158,7 +158,7 @@ public class MoreLikeThisQuery extends Query {
if (this.unlikeText != null || this.unlikeFields != null) {
handleUnlike(mlt, this.unlikeText, this.unlikeFields);
}
return createQuery(mlt);
}
@ -182,7 +182,7 @@ public class MoreLikeThisQuery extends Query {
BooleanQuery bq = bqBuilder.build();
bq.setBoost(getBoost());
return bq;
}
private void handleUnlike(XMoreLikeThis mlt, String[] unlikeText, Fields[] unlikeFields) throws IOException {
@ -257,8 +257,8 @@ public class MoreLikeThisQuery extends Query {
this.unlikeFields = unlikeFields;
}
public void setUnlikeText(List<String> unlikeText) {
this.unlikeText = unlikeText.toArray(Strings.EMPTY_ARRAY);
public void setUnlikeText(String[] unlikeText) {
this.unlikeText = unlikeText;
}
public String[] getMoreLikeFields() {

View File

@ -20,19 +20,20 @@
package org.elasticsearch.common.lucene.search.function;
import org.apache.lucene.search.Explanation;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
public enum CombineFunction {
MULT {
import java.io.IOException;
import java.util.Locale;
public enum CombineFunction implements Writeable<CombineFunction> {
MULTIPLY {
@Override
public float combine(double queryScore, double funcScore, double maxBoost) {
return toFloat(queryScore * Math.min(funcScore, maxBoost));
}
@Override
public String getName() {
return "multiply";
}
@Override
public Explanation explain(Explanation queryExpl, Explanation funcExpl, float maxBoost) {
Explanation boostExpl = Explanation.match(maxBoost, "maxBoost");
@ -50,11 +51,6 @@ public enum CombineFunction {
return toFloat(Math.min(funcScore, maxBoost));
}
@Override
public String getName() {
return "replace";
}
@Override
public Explanation explain(Explanation queryExpl, Explanation funcExpl, float maxBoost) {
Explanation boostExpl = Explanation.match(maxBoost, "maxBoost");
@ -71,11 +67,6 @@ public enum CombineFunction {
return toFloat(queryScore + Math.min(funcScore, maxBoost));
}
@Override
public String getName() {
return "sum";
}
@Override
public Explanation explain(Explanation queryExpl, Explanation funcExpl, float maxBoost) {
Explanation minExpl = Explanation.match(Math.min(funcExpl.getValue(), maxBoost), "min of:",
@ -91,11 +82,6 @@ public enum CombineFunction {
return toFloat((Math.min(funcScore, maxBoost) + queryScore) / 2.0);
}
@Override
public String getName() {
return "avg";
}
@Override
public Explanation explain(Explanation queryExpl, Explanation funcExpl, float maxBoost) {
Explanation minExpl = Explanation.match(Math.min(funcExpl.getValue(), maxBoost), "min of:",
@ -112,11 +98,6 @@ public enum CombineFunction {
return toFloat(Math.min(queryScore, Math.min(funcScore, maxBoost)));
}
@Override
public String getName() {
return "min";
}
@Override
public Explanation explain(Explanation queryExpl, Explanation funcExpl, float maxBoost) {
Explanation innerMinExpl = Explanation.match(
@ -134,11 +115,6 @@ public enum CombineFunction {
return toFloat(Math.max(queryScore, Math.min(funcScore, maxBoost)));
}
@Override
public String getName() {
return "max";
}
@Override
public Explanation explain(Explanation queryExpl, Explanation funcExpl, float maxBoost) {
Explanation innerMinExpl = Explanation.match(
@ -153,8 +129,6 @@ public enum CombineFunction {
public abstract float combine(double queryScore, double funcScore, double maxBoost);
public abstract String getName();
public static float toFloat(double input) {
assert deviation(input) <= 0.001 : "input " + input + " out of float scope for function score deviation: " + deviation(input);
return (float) input;
@ -166,4 +140,26 @@ public enum CombineFunction {
}
public abstract Explanation explain(Explanation queryExpl, Explanation funcExpl, float maxBoost);
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeVInt(this.ordinal());
}
@Override
public CombineFunction readFrom(StreamInput in) throws IOException {
int ordinal = in.readVInt();
if (ordinal < 0 || ordinal >= values().length) {
throw new IOException("Unknown CombineFunction ordinal [" + ordinal + "]");
}
return values()[ordinal];
}
public static CombineFunction readCombineFunctionFrom(StreamInput in) throws IOException {
return CombineFunction.MULTIPLY.readFrom(in);
}
public static CombineFunction fromString(String combineFunction) {
return valueOf(combineFunction.toUpperCase(Locale.ROOT));
}
}
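With the per-constant getName() overrides gone, parsing goes through the enum name instead; for instance (illustration only):
assert CombineFunction.fromString("multiply") == CombineFunction.MULTIPLY;
assert CombineFunction.fromString("AVG") == CombineFunction.AVG;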

View File

@ -22,11 +22,16 @@ package org.elasticsearch.common.lucene.search.function;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.Explanation;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.index.fielddata.FieldData;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
import java.io.IOException;
import java.util.Locale;
import java.util.Objects;
/**
* A function_score function that multiplies the score with the value of a
@ -45,7 +50,7 @@ public class FieldValueFactorFunction extends ScoreFunction {
public FieldValueFactorFunction(String field, float boostFactor, Modifier modifierType, Double missing,
IndexNumericFieldData indexFieldData) {
super(CombineFunction.MULT);
super(CombineFunction.MULTIPLY);
this.field = field;
this.boostFactor = boostFactor;
this.modifier = modifierType;
@ -103,11 +108,19 @@ public class FieldValueFactorFunction extends ScoreFunction {
return false;
}
@Override
protected boolean doEquals(ScoreFunction other) {
FieldValueFactorFunction fieldValueFactorFunction = (FieldValueFactorFunction) other;
return this.boostFactor == fieldValueFactorFunction.boostFactor &&
Objects.equals(this.field, fieldValueFactorFunction.field) &&
Objects.equals(this.modifier, fieldValueFactorFunction.modifier);
}
/**
* The Type class encapsulates the modification types that can be applied
* to the score/value product.
*/
public enum Modifier {
public enum Modifier implements Writeable<Modifier> {
NONE {
@Override
public double apply(double n) {
@ -171,9 +184,31 @@ public class FieldValueFactorFunction extends ScoreFunction {
public abstract double apply(double n);
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeVInt(this.ordinal());
}
public static Modifier readModifierFrom(StreamInput in) throws IOException {
return Modifier.NONE.readFrom(in);
}
@Override
public Modifier readFrom(StreamInput in) throws IOException {
int ordinal = in.readVInt();
if (ordinal < 0 || ordinal >= values().length) {
throw new IOException("Unknown Modifier ordinal [" + ordinal + "]");
}
return values()[ordinal];
}
@Override
public String toString() {
return super.toString().toLowerCase(Locale.ROOT);
}
public static Modifier fromString(String modifier) {
return valueOf(modifier.toUpperCase(Locale.ROOT));
}
}
}

View File

@ -29,14 +29,13 @@ import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.ToStringUtils;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.lucene.Lucene;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Locale;
import java.util.Set;
import java.util.*;
/**
* A query that allows for a pluggable boost function / filter. If it matches
@ -55,53 +54,63 @@ public class FiltersFunctionScoreQuery extends Query {
@Override
public boolean equals(Object o) {
if (this == o)
if (this == o) {
return true;
if (o == null || getClass() != o.getClass())
}
if (o == null || getClass() != o.getClass()) {
return false;
}
FilterFunction that = (FilterFunction) o;
if (filter != null ? !filter.equals(that.filter) : that.filter != null)
return false;
if (function != null ? !function.equals(that.function) : that.function != null)
return false;
return true;
return Objects.equals(this.filter, that.filter) && Objects.equals(this.function, that.function);
}
@Override
public int hashCode() {
int result = filter != null ? filter.hashCode() : 0;
result = 31 * result + (function != null ? function.hashCode() : 0);
return result;
return Objects.hash(super.hashCode(), filter, function);
}
}
public static enum ScoreMode {
First, Avg, Max, Sum, Min, Multiply
public enum ScoreMode implements Writeable<ScoreMode> {
FIRST, AVG, MAX, SUM, MIN, MULTIPLY;
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeVInt(this.ordinal());
}
@Override
public ScoreMode readFrom(StreamInput in) throws IOException {
int ordinal = in.readVInt();
if (ordinal < 0 || ordinal >= values().length) {
throw new IOException("Unknown ScoreMode ordinal [" + ordinal + "]");
}
return values()[ordinal];
}
public static ScoreMode readScoreModeFrom(StreamInput in) throws IOException {
return ScoreMode.MULTIPLY.readFrom(in);
}
public static ScoreMode fromString(String scoreMode) {
return valueOf(scoreMode.toUpperCase(Locale.ROOT));
}
}
Query subQuery;
final FilterFunction[] filterFunctions;
final ScoreMode scoreMode;
final float maxBoost;
private Float minScore;
private final Float minScore;
protected CombineFunction combineFunction;
final protected CombineFunction combineFunction;
public FiltersFunctionScoreQuery(Query subQuery, ScoreMode scoreMode, FilterFunction[] filterFunctions, float maxBoost, Float minScore) {
public FiltersFunctionScoreQuery(Query subQuery, ScoreMode scoreMode, FilterFunction[] filterFunctions, float maxBoost, Float minScore, CombineFunction combineFunction) {
this.subQuery = subQuery;
this.scoreMode = scoreMode;
this.filterFunctions = filterFunctions;
this.maxBoost = maxBoost;
combineFunction = CombineFunction.MULT;
this.minScore = minScore;
}
public FiltersFunctionScoreQuery setCombineFunction(CombineFunction combineFunction) {
this.combineFunction = combineFunction;
return this;
this.minScore = minScore;
}
public Query getSubQuery() {
@ -227,35 +236,34 @@ public class FiltersFunctionScoreQuery extends Query {
// filters
double factor = 1.0;
switch (scoreMode) {
case First:
case FIRST:
factor = filterExplanations.get(0).getValue();
break;
case Max:
case MAX:
factor = Double.NEGATIVE_INFINITY;
for (int i = 0; i < filterExplanations.size(); i++) {
factor = Math.max(filterExplanations.get(i).getValue(), factor);
for (Explanation filterExplanation : filterExplanations) {
factor = Math.max(filterExplanation.getValue(), factor);
}
break;
case Min:
case MIN:
factor = Double.POSITIVE_INFINITY;
for (int i = 0; i < filterExplanations.size(); i++) {
factor = Math.min(filterExplanations.get(i).getValue(), factor);
for (Explanation filterExplanation : filterExplanations) {
factor = Math.min(filterExplanation.getValue(), factor);
}
break;
case Multiply:
for (int i = 0; i < filterExplanations.size(); i++) {
factor *= filterExplanations.get(i).getValue();
case MULTIPLY:
for (Explanation filterExplanation : filterExplanations) {
factor *= filterExplanation.getValue();
}
break;
default: // Avg / Total
default:
double totalFactor = 0.0f;
for (int i = 0; i < filterExplanations.size(); i++) {
totalFactor += filterExplanations.get(i).getValue();
for (Explanation filterExplanation : filterExplanations) {
totalFactor += filterExplanation.getValue();
}
if (weightSum != 0) {
factor = totalFactor;
if (scoreMode == ScoreMode.Avg) {
if (scoreMode == ScoreMode.AVG) {
factor /= weightSum;
}
}
@ -293,58 +301,64 @@ public class FiltersFunctionScoreQuery extends Query {
// be costly to call score(), so we explicitly check if scores
// are needed
float subQueryScore = needsScores ? scorer.score() : 0f;
if (scoreMode == ScoreMode.First) {
for (int i = 0; i < filterFunctions.length; i++) {
if (docSets[i].get(docId)) {
factor = functions[i].score(docId, subQueryScore);
break;
}
}
} else if (scoreMode == ScoreMode.Max) {
double maxFactor = Double.NEGATIVE_INFINITY;
for (int i = 0; i < filterFunctions.length; i++) {
if (docSets[i].get(docId)) {
maxFactor = Math.max(functions[i].score(docId, subQueryScore), maxFactor);
}
}
if (maxFactor != Float.NEGATIVE_INFINITY) {
factor = maxFactor;
}
} else if (scoreMode == ScoreMode.Min) {
double minFactor = Double.POSITIVE_INFINITY;
for (int i = 0; i < filterFunctions.length; i++) {
if (docSets[i].get(docId)) {
minFactor = Math.min(functions[i].score(docId, subQueryScore), minFactor);
}
}
if (minFactor != Float.POSITIVE_INFINITY) {
factor = minFactor;
}
} else if (scoreMode == ScoreMode.Multiply) {
for (int i = 0; i < filterFunctions.length; i++) {
if (docSets[i].get(docId)) {
factor *= functions[i].score(docId, subQueryScore);
}
}
} else { // Avg / Total
double totalFactor = 0.0f;
float weightSum = 0;
for (int i = 0; i < filterFunctions.length; i++) {
if (docSets[i].get(docId)) {
totalFactor += functions[i].score(docId, subQueryScore);
if (filterFunctions[i].function instanceof WeightFactorFunction) {
weightSum+= ((WeightFactorFunction)filterFunctions[i].function).getWeight();
} else {
weightSum++;
switch(scoreMode) {
case FIRST:
for (int i = 0; i < filterFunctions.length; i++) {
if (docSets[i].get(docId)) {
factor = functions[i].score(docId, subQueryScore);
break;
}
}
}
if (weightSum != 0) {
factor = totalFactor;
if (scoreMode == ScoreMode.Avg) {
factor /= weightSum;
break;
case MAX:
double maxFactor = Double.NEGATIVE_INFINITY;
for (int i = 0; i < filterFunctions.length; i++) {
if (docSets[i].get(docId)) {
maxFactor = Math.max(functions[i].score(docId, subQueryScore), maxFactor);
}
}
}
if (maxFactor != Float.NEGATIVE_INFINITY) {
factor = maxFactor;
}
break;
case MIN:
double minFactor = Double.POSITIVE_INFINITY;
for (int i = 0; i < filterFunctions.length; i++) {
if (docSets[i].get(docId)) {
minFactor = Math.min(functions[i].score(docId, subQueryScore), minFactor);
}
}
if (minFactor != Float.POSITIVE_INFINITY) {
factor = minFactor;
}
break;
case MULTIPLY:
for (int i = 0; i < filterFunctions.length; i++) {
if (docSets[i].get(docId)) {
factor *= functions[i].score(docId, subQueryScore);
}
}
break;
default: // Avg / Total
double totalFactor = 0.0f;
float weightSum = 0;
for (int i = 0; i < filterFunctions.length; i++) {
if (docSets[i].get(docId)) {
totalFactor += functions[i].score(docId, subQueryScore);
if (filterFunctions[i].function instanceof WeightFactorFunction) {
weightSum+= ((WeightFactorFunction)filterFunctions[i].function).getWeight();
} else {
weightSum++;
}
}
}
if (weightSum != 0) {
factor = totalFactor;
if (scoreMode == ScoreMode.AVG) {
factor /= weightSum;
}
}
break;
}
return scoreCombiner.combine(subQueryScore, factor, maxBoost);
}
@ -364,19 +378,20 @@ public class FiltersFunctionScoreQuery extends Query {
@Override
public boolean equals(Object o) {
if (o == null || getClass() != o.getClass())
return false;
FiltersFunctionScoreQuery other = (FiltersFunctionScoreQuery) o;
if (this.getBoost() != other.getBoost())
return false;
if (!this.subQuery.equals(other.subQuery)) {
if (this == o) {
return true;
}
if (super.equals(o) == false) {
return false;
}
return Arrays.equals(this.filterFunctions, other.filterFunctions);
FiltersFunctionScoreQuery other = (FiltersFunctionScoreQuery) o;
return Objects.equals(this.subQuery, other.subQuery) && this.maxBoost == other.maxBoost &&
Objects.equals(this.combineFunction, other.combineFunction) && Objects.equals(this.minScore, other.minScore) &&
Arrays.equals(this.filterFunctions, other.filterFunctions);
}
@Override
public int hashCode() {
return subQuery.hashCode() + 31 * Arrays.hashCode(filterFunctions) ^ Float.floatToIntBits(getBoost());
return Objects.hash(super.hashCode(), subQuery, maxBoost, combineFunction, minScore, filterFunctions);
}
}

View File

@ -23,7 +23,6 @@ import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.*;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.ToStringUtils;
import java.io.IOException;
@ -35,31 +34,27 @@ import java.util.Set;
*/
public class FunctionScoreQuery extends Query {
public static final float DEFAULT_MAX_BOOST = Float.MAX_VALUE;
Query subQuery;
final ScoreFunction function;
float maxBoost = Float.MAX_VALUE;
CombineFunction combineFunction;
private Float minScore = null;
final float maxBoost;
final CombineFunction combineFunction;
private Float minScore;
public FunctionScoreQuery(Query subQuery, ScoreFunction function, Float minScore) {
public FunctionScoreQuery(Query subQuery, ScoreFunction function, Float minScore, CombineFunction combineFunction, float maxBoost) {
this.subQuery = subQuery;
this.function = function;
this.combineFunction = function == null? CombineFunction.MULT : function.getDefaultScoreCombiner();
this.combineFunction = combineFunction;
this.minScore = minScore;
this.maxBoost = maxBoost;
}
public FunctionScoreQuery(Query subQuery, ScoreFunction function) {
this.subQuery = subQuery;
this.function = function;
this.combineFunction = function.getDefaultScoreCombiner();
}
public void setCombineFunction(CombineFunction combineFunction) {
this.combineFunction = combineFunction;
}
public void setMaxBoost(float maxBoost) {
this.maxBoost = maxBoost;
this.maxBoost = DEFAULT_MAX_BOOST;
}
public float getMaxBoost() {
@ -193,15 +188,20 @@ public class FunctionScoreQuery extends Query {
@Override
public boolean equals(Object o) {
if (o == null || getClass() != o.getClass())
if (this == o) {
return true;
}
if (super.equals(o) == false) {
return false;
}
FunctionScoreQuery other = (FunctionScoreQuery) o;
return this.getBoost() == other.getBoost() && this.subQuery.equals(other.subQuery) && (this.function != null ? this.function.equals(other.function) : other.function == null)
&& this.maxBoost == other.maxBoost;
return Objects.equals(this.subQuery, other.subQuery) && Objects.equals(this.function, other.function)
&& Objects.equals(this.combineFunction, other.combineFunction)
&& Objects.equals(this.minScore, other.minScore) && this.maxBoost == other.maxBoost;
}
@Override
public int hashCode() {
return subQuery.hashCode() + 31 * Objects.hashCode(function) ^ Float.floatToIntBits(getBoost());
return Objects.hash(super.hashCode(), subQuery.hashCode(), function, combineFunction, minScore, maxBoost);
}
}

View File

@ -38,7 +38,7 @@ public class RandomScoreFunction extends ScoreFunction {
* Default constructor. Only useful for constructing as a placeholder, but should not be used for actual scoring.
*/
public RandomScoreFunction() {
super(CombineFunction.MULT);
super(CombineFunction.MULTIPLY);
uidFieldData = null;
}
@ -50,7 +50,7 @@ public class RandomScoreFunction extends ScoreFunction {
* @param uidFieldData The field data for _uid to use for generating consistent random values for the same id
*/
public RandomScoreFunction(int seed, int salt, IndexFieldData<?> uidFieldData) {
super(CombineFunction.MULT);
super(CombineFunction.MULTIPLY);
this.originalSeed = seed;
this.saltedSeed = seed ^ salt;
this.uidFieldData = uidFieldData;
@ -85,4 +85,11 @@ public class RandomScoreFunction extends ScoreFunction {
public boolean needsScores() {
return false;
}
@Override
protected boolean doEquals(ScoreFunction other) {
RandomScoreFunction randomScoreFunction = (RandomScoreFunction) other;
return this.originalSeed == randomScoreFunction.originalSeed &&
this.saltedSeed == randomScoreFunction.saltedSeed;
}
}

View File

@ -22,6 +22,7 @@ package org.elasticsearch.common.lucene.search.function;
import org.apache.lucene.index.LeafReaderContext;
import java.io.IOException;
import java.util.Objects;
/**
*
@ -46,4 +47,23 @@ public abstract class ScoreFunction {
* @return {@code true} if scores are needed.
*/
public abstract boolean needsScores();
@Override
public final boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || getClass() != obj.getClass()) {
return false;
}
ScoreFunction other = (ScoreFunction) obj;
return Objects.equals(scoreCombiner, other.scoreCombiner) &&
doEquals(other);
}
/**
* Indicates whether some other {@link ScoreFunction} object of the same type is "equal to" this one.
*/
protected abstract boolean doEquals(ScoreFunction other);
}

View File

@ -29,6 +29,7 @@ import org.elasticsearch.script.ScriptException;
import org.elasticsearch.script.SearchScript;
import java.io.IOException;
import java.util.Objects;
public class ScriptScoreFunction extends ScoreFunction {
@ -136,4 +137,9 @@ public class ScriptScoreFunction extends ScoreFunction {
return "script" + sScript.toString();
}
@Override
protected boolean doEquals(ScoreFunction other) {
ScriptScoreFunction scriptScoreFunction = (ScriptScoreFunction) other;
return Objects.equals(this.sScript, scriptScoreFunction.sScript);
}
}

View File

@ -23,18 +23,19 @@ import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.Explanation;
import java.io.IOException;
import java.util.Objects;
/**
*
*/
public class WeightFactorFunction extends ScoreFunction {
private static final ScoreFunction SCORE_ONE = new ScoreOne(CombineFunction.MULT);
private static final ScoreFunction SCORE_ONE = new ScoreOne(CombineFunction.MULTIPLY);
private final ScoreFunction scoreFunction;
private float weight = 1.0f;
public WeightFactorFunction(float weight, ScoreFunction scoreFunction) {
super(CombineFunction.MULT);
super(CombineFunction.MULTIPLY);
if (scoreFunction == null) {
this.scoreFunction = SCORE_ONE;
} else {
@ -44,7 +45,7 @@ public class WeightFactorFunction extends ScoreFunction {
}
public WeightFactorFunction(float weight) {
super(CombineFunction.MULT);
super(CombineFunction.MULTIPLY);
this.scoreFunction = SCORE_ONE;
this.weight = weight;
}
@ -81,6 +82,17 @@ public class WeightFactorFunction extends ScoreFunction {
return weight;
}
public ScoreFunction getScoreFunction() {
return scoreFunction;
}
@Override
protected boolean doEquals(ScoreFunction other) {
WeightFactorFunction weightFactorFunction = (WeightFactorFunction) other;
return this.weight == weightFactorFunction.weight &&
Objects.equals(this.scoreFunction, weightFactorFunction.scoreFunction);
}
private static class ScoreOne extends ScoreFunction {
protected ScoreOne(CombineFunction scoreCombiner) {
@ -106,5 +118,10 @@ public class WeightFactorFunction extends ScoreFunction {
public boolean needsScores() {
return false;
}
@Override
protected boolean doEquals(ScoreFunction other) {
return true;
}
}
}

View File

@ -22,6 +22,8 @@ package org.elasticsearch.common.path;
import com.google.common.collect.ImmutableMap;
import org.elasticsearch.common.Strings;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import static org.elasticsearch.common.collect.MapBuilder.newMapBuilder;
@ -195,7 +197,7 @@ public class PathTrie<T> {
private void put(Map<String, String> params, TrieNode<T> node, String value) {
if (params != null && node.isNamedWildcard()) {
params.put(node.namedWildcard(), decoder.decode(value));
params.put(node.namedWildcard(), value);
}
}
}
@ -222,7 +224,7 @@ public class PathTrie<T> {
if (path.length() == 0) {
return rootValue;
}
String[] strings = Strings.splitStringToArray(path, separator);
String[] strings = splitPath(decoder.decode(path));
if (strings.length == 0) {
return rootValue;
}
@ -233,4 +235,50 @@ public class PathTrie<T> {
}
return root.retrieve(strings, index, params);
}
/*
Splits the url path by '/' and is aware of
index name expressions that appear between '<' and '>'.
*/
String[] splitPath(final String path) {
if (path == null || path.length() == 0) {
return Strings.EMPTY_ARRAY;
}
int count = 1;
boolean splitAllowed = true;
for (int i = 0; i < path.length(); i++) {
final char currentC = path.charAt(i);
if ('<' == currentC) {
splitAllowed = false;
} else if (currentC == '>') {
splitAllowed = true;
} else if (splitAllowed && currentC == separator) {
count++;
}
}
final List<String> result = new ArrayList<>(count);
final StringBuilder builder = new StringBuilder();
splitAllowed = true;
for (int i = 0; i < path.length(); i++) {
final char currentC = path.charAt(i);
if ('<' == currentC) {
splitAllowed = false;
} else if (currentC == '>') {
splitAllowed = true;
} else if (splitAllowed && currentC == separator) {
if (builder.length() > 0) {
result.add(builder.toString());
builder.setLength(0);
}
continue;
}
builder.append(currentC);
}
if (builder.length() > 0) {
result.add(builder.toString());
}
return result.toArray(new String[result.size()]);
}
}
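
With '/' as the separator, the splitAllowed flag keeps anything between '<' and '>' in a single path element, which is what protects date-math index expressions. A standalone sketch of the same logic with the separator hard-coded to '/' (the two-pass size pre-count is omitted for brevity):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

// Characters between '<' and '>' are never split on, which keeps date-math
// index expressions such as <logstash-{now/d}> in one path element.
public class SplitPathDemo {

    static String[] splitPath(final String path) {
        if (path == null || path.isEmpty()) {
            return new String[0];
        }
        final List<String> result = new ArrayList<>();
        final StringBuilder builder = new StringBuilder();
        boolean splitAllowed = true;
        for (int i = 0; i < path.length(); i++) {
            final char c = path.charAt(i);
            if (c == '<') {
                splitAllowed = false;
            } else if (c == '>') {
                splitAllowed = true;
            } else if (splitAllowed && c == '/') {
                if (builder.length() > 0) {
                    result.add(builder.toString());
                    builder.setLength(0);
                }
                continue; // separators are dropped; '<' and '>' are kept
            }
            builder.append(c);
        }
        if (builder.length() > 0) {
            result.add(builder.toString());
        }
        return result.toArray(new String[result.size()]);
    }

    public static void main(String[] args) {
        // prints [<logstash-{now/d}>, _search]: the '/' inside <...> survives
        System.out.println(Arrays.toString(splitPath("/<logstash-{now/d}>/_search")));
    }
}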

View File

@ -19,19 +19,24 @@
package org.elasticsearch.common.unit;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.Locale;
import java.util.Objects;
/**
* A unit class that encapsulates all inexact search
* parsing and conversion from similarities to edit distances
* etc.
*/
public final class Fuzziness implements ToXContent {
public final class Fuzziness implements ToXContent, Writeable<Fuzziness> {
public static final XContentBuilderString X_FIELD_NAME = new XContentBuilderString("fuzziness");
public static final Fuzziness ZERO = new Fuzziness(0);
@ -42,6 +47,10 @@ public final class Fuzziness implements ToXContent {
private final String fuzziness;
/** the prototype constant is intended for deserialization when used with
* {@link org.elasticsearch.common.io.stream.StreamableReader#readFrom(StreamInput)} */
static final Fuzziness PROTOTYPE = AUTO;
private Fuzziness(int fuzziness) {
if (fuzziness != 0 && fuzziness != 1 && fuzziness != 2) {
throw new IllegalArgumentException("Valid edit distances are [0, 1, 2] but was [" + fuzziness + "]");
@ -50,7 +59,10 @@ public final class Fuzziness implements ToXContent {
}
private Fuzziness(String fuzziness) {
this.fuzziness = fuzziness;
if (fuzziness == null) {
throw new IllegalArgumentException("fuzziness can't be null!");
}
this.fuzziness = fuzziness.toUpperCase(Locale.ROOT);
}
/**
@ -120,7 +132,7 @@ public final class Fuzziness implements ToXContent {
}
public int asDistance(String text) {
if (this == AUTO) { //AUTO
if (this.equals(AUTO)) { //AUTO
final int len = termLen(text);
if (len <= 2) {
return 0;
@ -134,7 +146,7 @@ public final class Fuzziness implements ToXContent {
}
public TimeValue asTimeValue() {
if (this == AUTO) {
if (this.equals(AUTO)) {
return TimeValue.timeValueMillis(1);
} else {
return TimeValue.parseTimeValue(fuzziness.toString(), null, "fuzziness");
@ -142,7 +154,7 @@ public final class Fuzziness implements ToXContent {
}
public long asLong() {
if (this == AUTO) {
if (this.equals(AUTO)) {
return 1;
}
try {
@ -153,7 +165,7 @@ public final class Fuzziness implements ToXContent {
}
public int asInt() {
if (this == AUTO) {
if (this.equals(AUTO)) {
return 1;
}
try {
@ -164,7 +176,7 @@ public final class Fuzziness implements ToXContent {
}
public short asShort() {
if (this == AUTO) {
if (this.equals(AUTO)) {
return 1;
}
try {
@ -175,7 +187,7 @@ public final class Fuzziness implements ToXContent {
}
public byte asByte() {
if (this == AUTO) {
if (this.equals(AUTO)) {
return 1;
}
try {
@ -186,14 +198,14 @@ public final class Fuzziness implements ToXContent {
}
public double asDouble() {
if (this == AUTO) {
if (this.equals(AUTO)) {
return 1d;
}
return Double.parseDouble(fuzziness.toString());
}
public float asFloat() {
if (this == AUTO) {
if (this.equals(AUTO)) {
return 1f;
}
return Float.parseFloat(fuzziness.toString());
@ -206,4 +218,35 @@ public final class Fuzziness implements ToXContent {
public String asString() {
return fuzziness.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || getClass() != obj.getClass()) {
return false;
}
Fuzziness other = (Fuzziness) obj;
return Objects.equals(fuzziness, other.fuzziness);
}
@Override
public int hashCode() {
return fuzziness.hashCode();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(fuzziness);
}
@Override
public Fuzziness readFrom(StreamInput in) throws IOException {
return new Fuzziness(in.readString());
}
public static Fuzziness readFuzzinessFrom(StreamInput in) throws IOException {
return PROTOTYPE.readFrom(in);
}
}
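
Because the String constructor now upper-cases its input and the AUTO checks use equals() instead of ==, an AUTO value that arrives as a lower-case string, or is deserialized from the wire as a fresh object, behaves exactly like the AUTO constant. A usage sketch; Fuzziness.build(Object) is assumed from elsewhere in this class, everything else is visible above:

import org.elasticsearch.common.unit.Fuzziness;

// Usage sketch against the class above.
public class FuzzinessDemo {
    public static void main(String[] args) {
        Fuzziness parsed = Fuzziness.build("auto");        // upper-cased to "AUTO"
        System.out.println(parsed.equals(Fuzziness.AUTO)); // true; == would be false
        System.out.println(parsed.asDistance("ab"));       // 0: length <= 2 allows no edits
        System.out.println(parsed.asDistance("kimchy"));   // longer terms allow more edits
    }
}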

View File

@ -21,7 +21,6 @@ package org.elasticsearch.common.xcontent;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.index.Index;
import java.io.IOException;
import java.util.*;
@ -130,7 +129,7 @@ public final class ObjectParser<Value, Context> implements BiFunction<XContentPa
try {
fieldParser.parser.parse(parser, value, context);
} catch (Exception ex) {
throw new ParsingException(new Index("_na_"), parser, "[" + name + "] failed to parse field [" + currentFieldName + "]", ex);
throw new ParsingException(parser.getTokenLocation(), "[" + name + "] failed to parse field [" + currentFieldName + "]", ex);
}
}
@ -172,7 +171,7 @@ public final class ObjectParser<Value, Context> implements BiFunction<XContentPa
try {
return parse(parser, valueSupplier.get(), context);
} catch (IOException e) {
throw new ParsingException(new Index("_na_"), parser, "[" + name + "] failed to parse object", e);
throw new ParsingException(parser.getTokenLocation(), "[" + name + "] failed to parse object", e);
}
}

View File

@ -18,12 +18,17 @@
*/
package org.elasticsearch.index;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.lucene.uid.Versions;
import java.io.IOException;
/**
*
*/
public enum VersionType {
public enum VersionType implements Writeable<VersionType> {
INTERNAL((byte) 0) {
@Override
public boolean isVersionConflictForWrites(long currentVersion, long expectedVersion) {
@ -219,6 +224,8 @@ public enum VersionType {
private final byte value;
private static final VersionType PROTOTYPE = INTERNAL;
VersionType(byte value) {
this.value = value;
}
@ -304,4 +311,20 @@ public enum VersionType {
}
throw new IllegalArgumentException("No version type match [" + value + "]");
}
@Override
public VersionType readFrom(StreamInput in) throws IOException {
int ordinal = in.readVInt();
assert (ordinal == 0 || ordinal == 1 || ordinal == 2 || ordinal == 3);
return VersionType.values()[ordinal];
}
public static VersionType readVersionTypeFrom(StreamInput in) throws IOException {
return PROTOTYPE.readFrom(in);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeVInt(ordinal());
}
}
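
The wire format is simply the enum ordinal as a variable-length int, with a bounds assert on the reading side because ordinal-based formats silently break if the enum order ever changes. A standalone sketch of the same round trip; plain DataOutputStream/DataInputStream stand in for StreamOutput/StreamInput, and the Mode constants mirror the VersionType values:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

enum Mode { INTERNAL, EXTERNAL, EXTERNAL_GTE, FORCE }

public class OrdinalWireDemo {
    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        try (DataOutputStream out = new DataOutputStream(bytes)) {
            out.writeInt(Mode.EXTERNAL.ordinal()); // writeTo: serialize the ordinal
        }
        try (DataInputStream in = new DataInputStream(new ByteArrayInputStream(bytes.toByteArray()))) {
            int ordinal = in.readInt();
            // mirror the bounds assert: ordinal formats rely on stable enum order
            assert ordinal >= 0 && ordinal < Mode.values().length;
            System.out.println(Mode.values()[ordinal]); // EXTERNAL
        }
    }
}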

View File

@ -33,7 +33,7 @@ import org.elasticsearch.common.lucene.BytesRefs;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.similarity.SimilarityProvider;
import java.io.IOException;
@ -437,7 +437,7 @@ public abstract class MappedFieldType extends FieldType {
}
/**
* Should the field query {@link #termQuery(Object, org.elasticsearch.index.query.QueryParseContext)} be used when detecting this
* Should the field query {@link #termQuery(Object, org.elasticsearch.index.query.QueryShardContext)} be used when detecting this
* field in query string.
*/
public boolean useTermQueryWithQueryString() {
@ -449,11 +449,11 @@ public abstract class MappedFieldType extends FieldType {
return new Term(names().indexName(), indexedValueForSearch(value));
}
public Query termQuery(Object value, @Nullable QueryParseContext context) {
public Query termQuery(Object value, @Nullable QueryShardContext context) {
return new TermQuery(createTerm(value));
}
public Query termsQuery(List values, @Nullable QueryParseContext context) {
public Query termsQuery(List values, @Nullable QueryShardContext context) {
BytesRef[] bytesRefs = new BytesRef[values.size()];
for (int i = 0; i < bytesRefs.length; i++) {
bytesRefs[i] = indexedValueForSearch(values.get(i));
@ -472,7 +472,7 @@ public abstract class MappedFieldType extends FieldType {
return new FuzzyQuery(createTerm(value), fuzziness.asDistance(BytesRefs.toString(value)), prefixLength, maxExpansions, transpositions);
}
public Query prefixQuery(String value, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryParseContext context) {
public Query prefixQuery(String value, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryShardContext context) {
PrefixQuery query = new PrefixQuery(createTerm(value));
if (method != null) {
query.setRewriteMethod(method);
@ -480,7 +480,7 @@ public abstract class MappedFieldType extends FieldType {
return query;
}
public Query regexpQuery(String value, int flags, int maxDeterminizedStates, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryParseContext context) {
public Query regexpQuery(String value, int flags, int maxDeterminizedStates, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryShardContext context) {
RegexpQuery query = new RegexpQuery(createTerm(value), flags, maxDeterminizedStates);
if (method != null) {
query.setRewriteMethod(method);

View File

@ -125,6 +125,7 @@ public class GeoShapeFieldMapper extends FieldMapper {
super(name, Defaults.FIELD_TYPE);
}
@Override
public GeoShapeFieldType fieldType() {
return (GeoShapeFieldType)fieldType;
}
@ -400,6 +401,10 @@ public class GeoShapeFieldMapper extends FieldMapper {
return this.defaultStrategy;
}
public PrefixTreeStrategy resolveStrategy(SpatialStrategy strategy) {
return resolveStrategy(strategy.getStrategyName());
}
public PrefixTreeStrategy resolveStrategy(String strategyName) {
if (SpatialStrategy.RECURSIVE.getStrategyName().equals(strategyName)) {
recursiveStrategy.setPointsOnly(pointsOnly());

View File

@ -40,7 +40,7 @@ import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.MetadataFieldMapper;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.similarity.SimilarityLookupService;
import java.io.IOException;
@ -186,7 +186,7 @@ public class AllFieldMapper extends MetadataFieldMapper {
}
@Override
public Query termQuery(Object value, QueryParseContext context) {
public Query termQuery(Object value, QueryShardContext context) {
return queryStringTermQuery(createTerm(value));
}
}

View File

@ -24,7 +24,12 @@ import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.Term;
import org.apache.lucene.queries.TermsQuery;
import org.apache.lucene.search.*;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.MultiTermQuery;
import org.apache.lucene.search.PrefixQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.RegexpQuery;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
import org.elasticsearch.common.Nullable;
@ -36,8 +41,15 @@ import org.elasticsearch.common.util.iterable.Iterables;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.*;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.MetadataFieldMapper;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.query.QueryShardContext;
import java.io.IOException;
import java.util.Collection;
@ -48,7 +60,7 @@ import java.util.Map;
import static org.elasticsearch.index.mapper.core.TypeParsers.parseField;
/**
*
*
*/
public class IdFieldMapper extends MetadataFieldMapper {
@ -155,7 +167,7 @@ public class IdFieldMapper extends MetadataFieldMapper {
}
@Override
public Query termQuery(Object value, @Nullable QueryParseContext context) {
public Query termQuery(Object value, @Nullable QueryShardContext context) {
if (indexOptions() != IndexOptions.NONE || context == null) {
return super.termQuery(value, context);
}
@ -164,7 +176,7 @@ public class IdFieldMapper extends MetadataFieldMapper {
}
@Override
public Query termsQuery(List values, @Nullable QueryParseContext context) {
public Query termsQuery(List values, @Nullable QueryShardContext context) {
if (indexOptions() != IndexOptions.NONE || context == null) {
return super.termsQuery(values, context);
}
@ -172,7 +184,7 @@ public class IdFieldMapper extends MetadataFieldMapper {
}
@Override
public Query prefixQuery(String value, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryParseContext context) {
public Query prefixQuery(String value, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryShardContext context) {
if (indexOptions() != IndexOptions.NONE || context == null) {
return super.prefixQuery(value, method, context);
}
@ -189,7 +201,7 @@ public class IdFieldMapper extends MetadataFieldMapper {
}
@Override
public Query regexpQuery(String value, int flags, int maxDeterminizedStates, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryParseContext context) {
public Query regexpQuery(String value, int flags, int maxDeterminizedStates, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryShardContext context) {
if (indexOptions() != IndexOptions.NONE || context == null) {
return super.regexpQuery(value, flags, maxDeterminizedStates, method, context);
}
@ -224,7 +236,7 @@ public class IdFieldMapper extends MetadataFieldMapper {
super(NAME, fieldType, Defaults.FIELD_TYPE, indexSettings);
this.path = path;
}
private static MappedFieldType idFieldType(Settings indexSettings, MappedFieldType existing) {
if (existing != null) {
return existing.clone();

View File

@ -38,7 +38,7 @@ import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.MetadataFieldMapper;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext;
import java.io.IOException;
import java.util.Iterator;
@ -157,7 +157,7 @@ public class IndexFieldMapper extends MetadataFieldMapper {
* indices
*/
@Override
public Query termQuery(Object value, @Nullable QueryParseContext context) {
public Query termQuery(Object value, @Nullable QueryShardContext context) {
if (context == null) {
return super.termQuery(value, context);
}
@ -171,7 +171,7 @@ public class IndexFieldMapper extends MetadataFieldMapper {
@Override
public Query termsQuery(List values, QueryParseContext context) {
public Query termsQuery(List values, QueryShardContext context) {
if (context == null) {
return super.termsQuery(values, context);
}

View File

@ -34,8 +34,16 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.loader.SettingsLoader;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.*;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.MetadataFieldMapper;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.query.QueryShardContext;
import java.io.IOException;
import java.util.ArrayList;
@ -202,12 +210,12 @@ public class ParentFieldMapper extends MetadataFieldMapper {
}
@Override
public Query termQuery(Object value, @Nullable QueryParseContext context) {
public Query termQuery(Object value, @Nullable QueryShardContext context) {
return termsQuery(Collections.singletonList(value), context);
}
@Override
public Query termsQuery(List values, @Nullable QueryParseContext context) {
public Query termsQuery(List values, @Nullable QueryShardContext context) {
if (context == null) {
return super.termsQuery(values, context);
}

View File

@ -43,7 +43,7 @@ import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.MetadataFieldMapper;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext;
import java.io.IOException;
import java.util.List;
@ -137,7 +137,7 @@ public class TypeFieldMapper extends MetadataFieldMapper {
}
@Override
public Query termQuery(Object value, @Nullable QueryParseContext context) {
public Query termQuery(Object value, @Nullable QueryShardContext context) {
if (indexOptions() == IndexOptions.NONE) {
return new ConstantScoreQuery(new PrefixQuery(new Term(UidFieldMapper.NAME, Uid.typePrefixAsBytes(BytesRefs.toBytesRef(value)))));
}

View File

@ -25,6 +25,7 @@ import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
@ -42,8 +43,7 @@ import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.internal.TypeFieldMapper;
import org.elasticsearch.index.percolator.stats.ShardPercolateService;
import org.elasticsearch.index.query.IndexQueryParserService;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.settings.IndexSettings;
import org.elasticsearch.index.shard.AbstractIndexShardComponent;
import org.elasticsearch.index.shard.IndexShard;
@ -187,9 +187,9 @@ public class PercolatorQueriesRegistry extends AbstractIndexShardComponent imple
private Query parseQuery(String type, XContentParser parser) {
String[] previousTypes = null;
if (type != null) {
QueryParseContext.setTypesWithPrevious(new String[]{type});
QueryShardContext.setTypesWithPrevious(new String[]{type});
}
QueryParseContext context = queryParserService.getParseContext();
QueryShardContext context = queryParserService.getShardContext();
try {
context.reset(parser);
// This means that fields in the query need to exist in the mapping prior to registering this query
@ -205,13 +205,13 @@ public class PercolatorQueriesRegistry extends AbstractIndexShardComponent imple
// if index.percolator.map_unmapped_fields_as_string is set to true, query can contain unmapped fields which will be mapped
// as an analyzed string.
context.setAllowUnmappedFields(false);
context.setMapUnmappedFieldAsString(mapUnmappedFieldsAsString ? true : false);
context.setMapUnmappedFieldAsString(mapUnmappedFieldsAsString);
return queryParserService.parseInnerQuery(context);
} catch (IOException e) {
throw new ParsingException(context, "Failed to parse", e);
throw new ParsingException(parser.getTokenLocation(), "Failed to parse", e);
} finally {
if (type != null) {
QueryParseContext.setTypes(previousTypes);
QueryShardContext.setTypes(previousTypes);
}
context.reset(null);
}

View File

@ -0,0 +1,266 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.query;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.action.support.ToXContentToBytes;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.lucene.BytesRefs;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentType;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Objects;
/**
* Base class for all classes producing lucene queries.
* Supports conversion to BytesReference and creation of lucene Query objects.
*/
public abstract class AbstractQueryBuilder<QB extends AbstractQueryBuilder> extends ToXContentToBytes implements QueryBuilder<QB> {
/** Default for boost to apply to resulting Lucene query. Defaults to 1.0 */
public static final float DEFAULT_BOOST = 1.0f;
public static final ParseField NAME_FIELD = new ParseField("_name");
public static final ParseField BOOST_FIELD = new ParseField("boost");
protected String queryName;
protected float boost = DEFAULT_BOOST;
protected AbstractQueryBuilder() {
super(XContentType.JSON);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
doXContent(builder, params);
builder.endObject();
return builder;
}
protected abstract void doXContent(XContentBuilder builder, Params params) throws IOException;
protected void printBoostAndQueryName(XContentBuilder builder) throws IOException {
builder.field("boost", boost);
if (queryName != null) {
builder.field("_name", queryName);
}
}
@Override
public final Query toQuery(QueryShardContext context) throws IOException {
Query query = doToQuery(context);
if (query != null) {
setFinalBoost(query);
if (queryName != null) {
context.addNamedQuery(queryName, query);
}
}
return query;
}
/**
* Sets the main boost on the query obtained by converting the current builder into a lucene query.
* The default behaviour is to set the main boost after verifying that no non-default boost
* value was previously set on the lucene query; that case requires a manual decision on how to
* combine the main boost with the lucene boost, made by overriding this method.
* @throws IllegalStateException if the lucene query boost has already been set
*/
protected void setFinalBoost(Query query) {
if (query.getBoost() != AbstractQueryBuilder.DEFAULT_BOOST) {
throw new IllegalStateException("lucene query boost is already set, override setFinalBoost to define how to combine lucene boost with main boost");
}
query.setBoost(boost);
}
@Override
public final Query toFilter(QueryShardContext context) throws IOException {
Query result = null;
final boolean originalIsFilter = context.isFilter;
try {
context.isFilter = true;
result = toQuery(context);
} finally {
context.isFilter = originalIsFilter;
}
return result;
}
protected abstract Query doToQuery(QueryShardContext context) throws IOException;
/**
* Sets the query name for the query.
*/
@SuppressWarnings("unchecked")
@Override
public final QB queryName(String queryName) {
this.queryName = queryName;
return (QB) this;
}
/**
* Returns the query name for the query.
*/
@Override
public final String queryName() {
return queryName;
}
/**
* Returns the boost for this query.
*/
@Override
public final float boost() {
return this.boost;
}
/**
* Sets the boost for this query. Documents matching this query will (in addition to the normal
* weightings) have their score multiplied by the boost provided.
*/
@SuppressWarnings("unchecked")
@Override
public final QB boost(float boost) {
this.boost = boost;
return (QB) this;
}
@Override
public final QB readFrom(StreamInput in) throws IOException {
QB queryBuilder = doReadFrom(in);
queryBuilder.boost = in.readFloat();
queryBuilder.queryName = in.readOptionalString();
return queryBuilder;
}
protected abstract QB doReadFrom(StreamInput in) throws IOException;
@Override
public final void writeTo(StreamOutput out) throws IOException {
doWriteTo(out);
out.writeFloat(boost);
out.writeOptionalString(queryName);
}
protected abstract void doWriteTo(StreamOutput out) throws IOException;
protected final QueryValidationException addValidationError(String validationError, QueryValidationException validationException) {
return QueryValidationException.addValidationError(getName(), validationError, validationException);
}
@Override
public final boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || getClass() != obj.getClass()) {
return false;
}
@SuppressWarnings("unchecked")
QB other = (QB) obj;
return Objects.equals(queryName, other.queryName) &&
Objects.equals(boost, other.boost) &&
doEquals(other);
}
/**
* Indicates whether some other {@link QueryBuilder} object of the same type is "equal to" this one.
*/
protected abstract boolean doEquals(QB other);
@Override
public final int hashCode() {
return Objects.hash(getClass(), queryName, boost, doHashCode());
}
protected abstract int doHashCode();
/**
* This helper method checks if the object passed in is a string, if so it
* converts it to a {@link BytesRef}.
* @param obj the input object
* @return the same input object or a {@link BytesRef} representation if input was of type string
*/
protected static Object convertToBytesRefIfString(Object obj) {
if (obj instanceof String) {
return BytesRefs.toBytesRef(obj);
}
return obj;
}
/**
* This helper method checks if the object passed in is a {@link BytesRef}, if so it
* converts it to a utf8 string.
* @param obj the input object
* @return the same input object or a utf8 string if input was of type {@link BytesRef}
*/
protected static Object convertToStringIfBytesRef(Object obj) {
if (obj instanceof BytesRef) {
return ((BytesRef) obj).utf8ToString();
}
return obj;
}
/**
* Helper method to convert a collection of {@link QueryBuilder} instances to lucene
* {@link Query} instances. {@link QueryBuilder}s that return <tt>null</tt> from their
* {@link QueryBuilder#toQuery(QueryShardContext)} method are not added to the
* resulting collection.
*/
protected static Collection<Query> toQueries(Collection<QueryBuilder> queryBuilders, QueryShardContext context) throws QueryShardException,
IOException {
List<Query> queries = new ArrayList<>(queryBuilders.size());
for (QueryBuilder queryBuilder : queryBuilders) {
Query query = queryBuilder.toQuery(context);
if (query != null) {
queries.add(query);
}
}
return queries;
}
@Override
public String getName() {
//default impl returns the same as the writeable name, but we keep the two methods distinct in case they ever diverge
return getWriteableName();
}
protected final void writeQueries(StreamOutput out, List<? extends QueryBuilder> queries) throws IOException {
out.writeVInt(queries.size());
for (QueryBuilder query : queries) {
out.writeQuery(query);
}
}
protected final List<QueryBuilder> readQueries(StreamInput in) throws IOException {
List<QueryBuilder> queries = new ArrayList<>();
int size = in.readVInt();
for (int i = 0; i < size; i++) {
queries.add(in.readQuery());
}
return queries;
}
}
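
A subclass therefore only implements the do* hooks plus getWriteableName(); boost and _name handling, equality on the shared fields and their wire format all come from this base class. A hypothetical minimal subclass, not part of this commit, showing where each hook fits:

package org.elasticsearch.index.query;

import java.io.IOException;

import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;

// Hypothetical subclass: only the do* hooks and the writeable name are
// implemented; everything shared is inherited from AbstractQueryBuilder.
public class MatchEverythingQueryBuilder extends AbstractQueryBuilder<MatchEverythingQueryBuilder> {

    public static final String NAME = "match_everything";

    @Override
    protected void doXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject(NAME);
        printBoostAndQueryName(builder); // shared boost/_name rendering
        builder.endObject();
    }

    @Override
    protected Query doToQuery(QueryShardContext context) throws IOException {
        return new MatchAllDocsQuery(); // toQuery() applies boost and registers _name
    }

    @Override
    protected MatchEverythingQueryBuilder doReadFrom(StreamInput in) throws IOException {
        return new MatchEverythingQueryBuilder(); // boost/_name are read by readFrom()
    }

    @Override
    protected void doWriteTo(StreamOutput out) throws IOException {
        // no state of its own; boost/_name are written by writeTo()
    }

    @Override
    protected boolean doEquals(MatchEverythingQueryBuilder other) {
        return true; // no state beyond the shared fields
    }

    @Override
    protected int doHashCode() {
        return 0;
    }

    @Override
    public String getWriteableName() {
        return NAME;
    }
}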

View File

@ -0,0 +1,165 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.query;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException;
import java.util.Objects;
public abstract class BaseTermQueryBuilder<QB extends BaseTermQueryBuilder<QB>> extends AbstractQueryBuilder<QB> {
/** Name of field to match against. */
protected final String fieldName;
/** Value to find matches for. */
protected final Object value;
/**
* Constructs a new base term query.
*
* @param fieldName The name of the field
* @param value The value of the term
*/
public BaseTermQueryBuilder(String fieldName, String value) {
this(fieldName, (Object) value);
}
/**
* Constructs a new base term query.
*
* @param fieldName The name of the field
* @param value The value of the term
*/
public BaseTermQueryBuilder(String fieldName, int value) {
this(fieldName, (Object) value);
}
/**
* Constructs a new base term query.
*
* @param fieldName The name of the field
* @param value The value of the term
*/
public BaseTermQueryBuilder(String fieldName, long value) {
this(fieldName, (Object) value);
}
/**
* Constructs a new base term query.
*
* @param fieldName The name of the field
* @param value The value of the term
*/
public BaseTermQueryBuilder(String fieldName, float value) {
this(fieldName, (Object) value);
}
/**
* Constructs a new base term query.
*
* @param fieldName The name of the field
* @param value The value of the term
*/
public BaseTermQueryBuilder(String fieldName, double value) {
this(fieldName, (Object) value);
}
/**
* Constructs a new base term query.
*
* @param fieldName The name of the field
* @param value The value of the term
*/
public BaseTermQueryBuilder(String fieldName, boolean value) {
this(fieldName, (Object) value);
}
/**
* Constructs a new base term query.
* In case the value is a string, we internally convert it to a {@link BytesRef}
* because in {@link TermQueryParser} and {@link SpanTermQueryParser} string values are parsed to {@link BytesRef}
* and we want the internal representation of the query to be equal regardless of whether it was created from XContent or via the Java API.
*
* @param fieldName The name of the field
* @param value The value of the term
*/
public BaseTermQueryBuilder(String fieldName, Object value) {
if (Strings.isEmpty(fieldName)) {
throw new IllegalArgumentException("field name is null or empty");
}
if (value == null) {
throw new IllegalArgumentException("value cannot be null");
}
this.fieldName = fieldName;
this.value = convertToBytesRefIfString(value);
}
/** Returns the field name used in this query. */
public String fieldName() {
return this.fieldName;
}
/**
* Returns the value used in this query.
* If necessary, converts internal {@link BytesRef} representation back to string.
*/
public Object value() {
return convertToStringIfBytesRef(this.value);
}
@Override
protected void doXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(getName());
builder.startObject(fieldName);
builder.field("value", convertToStringIfBytesRef(this.value));
printBoostAndQueryName(builder);
builder.endObject();
builder.endObject();
}
@Override
protected final int doHashCode() {
return Objects.hash(fieldName, value);
}
@Override
protected final boolean doEquals(BaseTermQueryBuilder other) {
return Objects.equals(fieldName, other.fieldName) &&
Objects.equals(value, other.value);
}
@Override
protected final QB doReadFrom(StreamInput in) throws IOException {
return createBuilder(in.readString(), in.readGenericValue());
}
protected abstract QB createBuilder(String fieldName, Object value);
@Override
protected void doWriteTo(StreamOutput out) throws IOException {
out.writeString(fieldName);
out.writeGenericValue(value);
}
}
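
The string-to-BytesRef normalization in the Object constructor is what makes a builder created through the Java API compare equal to one parsed from XContent, while value() hides the conversion again. A usage sketch with TermQueryBuilder, assumed here to be a subclass with a (String, String) constructor matching the ones above:

import org.elasticsearch.index.query.TermQueryBuilder;

// Usage sketch against the class above.
public class TermValueDemo {
    public static void main(String[] args) {
        TermQueryBuilder fromApi = new TermQueryBuilder("user", "kimchy");
        // stored internally as a BytesRef, converted back by value():
        System.out.println(fromApi.value()); // kimchy
        // equal to an identically-built instance thanks to the normalization:
        System.out.println(fromApi.equals(new TermQueryBuilder("user", "kimchy"))); // true
    }
}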

View File

@ -19,17 +19,35 @@
package org.elasticsearch.index.query;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import static org.elasticsearch.common.lucene.search.Queries.fixNegativeQueryIfNeeded;
/**
* A Query that matches documents matching boolean combinations of other queries.
*/
public class BoolQueryBuilder extends QueryBuilder implements BoostableQueryBuilder<BoolQueryBuilder> {
public class BoolQueryBuilder extends AbstractQueryBuilder<BoolQueryBuilder> {
public static final String NAME = "bool";
public static final boolean ADJUST_PURE_NEGATIVE_DEFAULT = true;
public static final boolean DISABLE_COORD_DEFAULT = false;
static final BoolQueryBuilder PROTOTYPE = new BoolQueryBuilder();
private final List<QueryBuilder> mustClauses = new ArrayList<>();
@ -39,63 +57,92 @@ public class BoolQueryBuilder extends QueryBuilder implements BoostableQueryBuil
private final List<QueryBuilder> shouldClauses = new ArrayList<>();
private float boost = -1;
private boolean disableCoord = DISABLE_COORD_DEFAULT;
private Boolean disableCoord;
private boolean adjustPureNegative = ADJUST_PURE_NEGATIVE_DEFAULT;
private String minimumShouldMatch;
private Boolean adjustPureNegative;
private String queryName;
/**
* Adds a query that <b>must</b> appear in the matching documents and will
* contribute to scoring.
* contribute to scoring. No <tt>null</tt> value allowed.
*/
public BoolQueryBuilder must(QueryBuilder queryBuilder) {
if (queryBuilder == null) {
throw new IllegalArgumentException("inner bool query clause cannot be null");
}
mustClauses.add(queryBuilder);
return this;
}
/**
* Gets the queries that <b>must</b> appear in the matching documents.
*/
public List<QueryBuilder> must() {
return this.mustClauses;
}
/**
* Adds a query that <b>must</b> appear in the matching documents but will
* not contribute to scoring.
* not contribute to scoring. No <tt>null</tt> value allowed.
*/
public BoolQueryBuilder filter(QueryBuilder queryBuilder) {
if (queryBuilder == null) {
throw new IllegalArgumentException("inner bool query clause cannot be null");
}
filterClauses.add(queryBuilder);
return this;
}
/**
* Adds a query that <b>must not</b> appear in the matching documents and
* will not contribute to scoring.
* Gets the queries that <b>must</b> appear in the matching documents but don't contribute to scoring.
*/
public List<QueryBuilder> filter() {
return this.filterClauses;
}
/**
* Adds a query that <b>must not</b> appear in the matching documents.
* No <tt>null</tt> value allowed.
*/
public BoolQueryBuilder mustNot(QueryBuilder queryBuilder) {
if (queryBuilder == null) {
throw new IllegalArgumentException("inner bool query clause cannot be null");
}
mustNotClauses.add(queryBuilder);
return this;
}
/**
* Adds a query that <i>should</i> appear in the matching documents. For a boolean query with no
* Gets the queries that <b>must not</b> appear in the matching documents.
*/
public List<QueryBuilder> mustNot() {
return this.mustNotClauses;
}
/**
* Adds a clause that <i>should</i> be matched by the returned documents. For a boolean query with no
* <tt>MUST</tt> clauses one or more <code>SHOULD</code> clauses must match a document
* for the BooleanQuery to match.
* for the BooleanQuery to match. No <tt>null</tt> value allowed.
*
* @see #minimumNumberShouldMatch(int)
*/
public BoolQueryBuilder should(QueryBuilder queryBuilder) {
if (queryBuilder == null) {
throw new IllegalArgumentException("inner bool query clause cannot be null");
}
shouldClauses.add(queryBuilder);
return this;
}
/**
* Sets the boost for this query. Documents matching this query will (in addition to the normal
* weightings) have their score multiplied by the boost provided.
* Gets the list of clauses that <b>should</b> be matched by the returned documents.
*
* @see #should(QueryBuilder)
* @see #minimumNumberShouldMatch(int)
*/
@Override
public BoolQueryBuilder boost(float boost) {
this.boost = boost;
return this;
public List<QueryBuilder> should() {
return this.shouldClauses;
}
/**
@ -106,6 +153,13 @@ public class BoolQueryBuilder extends QueryBuilder implements BoostableQueryBuil
return this;
}
/**
* @return whether the <tt>Similarity#coord(int,int)</tt> factor in scoring is disabled. Defaults to <tt>false</tt>.
*/
public boolean disableCoord() {
return this.disableCoord;
}
/**
* Specifies a minimum number of the optional (should) boolean clauses which must be satisfied.
* <p>
@ -124,6 +178,23 @@ public class BoolQueryBuilder extends QueryBuilder implements BoostableQueryBuil
return this;
}
/**
* Specifies a minimum number of the optional (should) boolean clauses which must be satisfied.
* @see BoolQueryBuilder#minimumNumberShouldMatch(int)
*/
public BoolQueryBuilder minimumNumberShouldMatch(String minimumNumberShouldMatch) {
this.minimumShouldMatch = minimumNumberShouldMatch;
return this;
}
/**
* @return the string representation of the minimumShouldMatch settings for this query
*/
public String minimumShouldMatch() {
return this.minimumShouldMatch;
}
/**
* Sets the minimum should match using the special syntax (for example, supporting percentage).
*/
@ -139,7 +210,7 @@ public class BoolQueryBuilder extends QueryBuilder implements BoostableQueryBuil
public boolean hasClauses() {
return !(mustClauses.isEmpty() && shouldClauses.isEmpty() && mustNotClauses.isEmpty() && filterClauses.isEmpty());
}
/**
* If a boolean query contains only negative ("must not") clauses should the
* BooleanQuery be enhanced with a {@link MatchAllDocsQuery} in order to act
@ -151,52 +222,126 @@ public class BoolQueryBuilder extends QueryBuilder implements BoostableQueryBuil
}
/**
* Sets the query name for the filter that can be used when searching for matched_filters per hit.
* @return the current value of the adjust_pure_negative setting in this query
*/
public BoolQueryBuilder queryName(String queryName) {
this.queryName = queryName;
return this;
public boolean adjustPureNegative() {
return this.adjustPureNegative;
}
@Override
protected void doXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject("bool");
builder.startObject(NAME);
doXArrayContent("must", mustClauses, builder, params);
doXArrayContent("filter", filterClauses, builder, params);
doXArrayContent("must_not", mustNotClauses, builder, params);
doXArrayContent("should", shouldClauses, builder, params);
if (boost != -1) {
builder.field("boost", boost);
}
if (disableCoord != null) {
builder.field("disable_coord", disableCoord);
}
builder.field("disable_coord", disableCoord);
builder.field("adjust_pure_negative", adjustPureNegative);
if (minimumShouldMatch != null) {
builder.field("minimum_should_match", minimumShouldMatch);
}
if (adjustPureNegative != null) {
builder.field("adjust_pure_negative", adjustPureNegative);
}
if (queryName != null) {
builder.field("_name", queryName);
}
printBoostAndQueryName(builder);
builder.endObject();
}
private void doXArrayContent(String field, List<QueryBuilder> clauses, XContentBuilder builder, Params params) throws IOException {
private static void doXArrayContent(String field, List<QueryBuilder> clauses, XContentBuilder builder, Params params) throws IOException {
if (clauses.isEmpty()) {
return;
}
if (clauses.size() == 1) {
builder.field(field);
clauses.get(0).toXContent(builder, params);
} else {
builder.startArray(field);
for (QueryBuilder clause : clauses) {
clause.toXContent(builder, params);
builder.startArray(field);
for (QueryBuilder clause : clauses) {
clause.toXContent(builder, params);
}
builder.endArray();
}
@Override
public String getWriteableName() {
return NAME;
}
@Override
protected Query doToQuery(QueryShardContext context) throws IOException {
BooleanQuery.Builder booleanQueryBuilder = new BooleanQuery.Builder();
booleanQueryBuilder.setDisableCoord(disableCoord);
addBooleanClauses(context, booleanQueryBuilder, mustClauses, BooleanClause.Occur.MUST);
addBooleanClauses(context, booleanQueryBuilder, mustNotClauses, BooleanClause.Occur.MUST_NOT);
addBooleanClauses(context, booleanQueryBuilder, shouldClauses, BooleanClause.Occur.SHOULD);
addBooleanClauses(context, booleanQueryBuilder, filterClauses, BooleanClause.Occur.FILTER);
BooleanQuery booleanQuery = booleanQueryBuilder.build();
if (booleanQuery.clauses().isEmpty()) {
return new MatchAllDocsQuery();
}
booleanQuery = Queries.applyMinimumShouldMatch(booleanQuery, minimumShouldMatch);
return adjustPureNegative ? fixNegativeQueryIfNeeded(booleanQuery) : booleanQuery;
}
private void addBooleanClauses(QueryShardContext context, BooleanQuery.Builder booleanQueryBuilder, List<QueryBuilder> clauses, Occur occurs) throws IOException {
for (QueryBuilder query : clauses) {
Query luceneQuery = null;
switch (occurs) {
case SHOULD:
if (context.isFilter() && minimumShouldMatch == null) {
minimumShouldMatch = "1";
}
luceneQuery = query.toQuery(context);
break;
case FILTER:
case MUST_NOT:
luceneQuery = query.toFilter(context);
break;
case MUST:
luceneQuery = query.toQuery(context);
}
if (luceneQuery != null) {
booleanQueryBuilder.add(new BooleanClause(luceneQuery, occurs));
}
builder.endArray();
}
}
@Override
protected int doHashCode() {
return Objects.hash(adjustPureNegative, disableCoord,
minimumShouldMatch, mustClauses, shouldClauses, mustNotClauses, filterClauses);
}
@Override
protected boolean doEquals(BoolQueryBuilder other) {
return Objects.equals(adjustPureNegative, other.adjustPureNegative) &&
Objects.equals(disableCoord, other.disableCoord) &&
Objects.equals(minimumShouldMatch, other.minimumShouldMatch) &&
Objects.equals(mustClauses, other.mustClauses) &&
Objects.equals(shouldClauses, other.shouldClauses) &&
Objects.equals(mustNotClauses, other.mustNotClauses) &&
Objects.equals(filterClauses, other.filterClauses);
}
@Override
protected BoolQueryBuilder doReadFrom(StreamInput in) throws IOException {
BoolQueryBuilder boolQueryBuilder = new BoolQueryBuilder();
List<QueryBuilder> queryBuilders = readQueries(in);
boolQueryBuilder.mustClauses.addAll(queryBuilders);
queryBuilders = readQueries(in);
boolQueryBuilder.mustNotClauses.addAll(queryBuilders);
queryBuilders = readQueries(in);
boolQueryBuilder.shouldClauses.addAll(queryBuilders);
queryBuilders = readQueries(in);
boolQueryBuilder.filterClauses.addAll(queryBuilders);
boolQueryBuilder.adjustPureNegative = in.readBoolean();
boolQueryBuilder.disableCoord = in.readBoolean();
boolQueryBuilder.minimumShouldMatch = in.readOptionalString();
return boolQueryBuilder;
}
@Override
protected void doWriteTo(StreamOutput out) throws IOException {
writeQueries(out, mustClauses);
writeQueries(out, mustNotClauses);
writeQueries(out, shouldClauses);
writeQueries(out, filterClauses);
out.writeBoolean(adjustPureNegative);
out.writeBoolean(disableCoord);
out.writeOptionalString(minimumShouldMatch);
}
}
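
A usage sketch of the builder: the add methods null-check and chain, and the rendered XContent comes from doXContent() above (toString() is assumed to render it via ToXContentToBytes):

import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.TermQueryBuilder;

// Usage sketch against the class above.
public class BoolUsageDemo {
    public static void main(String[] args) {
        BoolQueryBuilder bool = new BoolQueryBuilder()
                .must(new TermQueryBuilder("user", "kimchy"))     // scored, required
                .filter(new TermQueryBuilder("status", "active")) // required, not scored
                .should(new TermQueryBuilder("tag", "search"))    // optional
                .minimumNumberShouldMatch(1);
        System.out.println(bool); // JSON with must/filter/should and minimum_should_match
        // bool.must(null) would throw IllegalArgumentException
    }
}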

View File

@ -19,13 +19,9 @@
package org.elasticsearch.index.query;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentParser;
@ -33,14 +29,10 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import static org.elasticsearch.common.lucene.search.Queries.fixNegativeQueryIfNeeded;
/**
*
* Parser for bool query
*/
public class BoolQueryParser implements QueryParser {
public static final String NAME = "bool";
public class BoolQueryParser implements QueryParser<BoolQueryBuilder> {
@Inject
public BoolQueryParser(Settings settings) {
@ -49,23 +41,27 @@ public class BoolQueryParser implements QueryParser {
@Override
public String[] names() {
return new String[]{NAME};
return new String[]{BoolQueryBuilder.NAME};
}
@Override
public Query parse(QueryParseContext parseContext) throws IOException, ParsingException {
public BoolQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException, ParsingException {
XContentParser parser = parseContext.parser();
boolean disableCoord = false;
float boost = 1.0f;
boolean disableCoord = BoolQueryBuilder.DISABLE_COORD_DEFAULT;
boolean adjustPureNegative = BoolQueryBuilder.ADJUST_PURE_NEGATIVE_DEFAULT;
float boost = AbstractQueryBuilder.DEFAULT_BOOST;
String minimumShouldMatch = null;
List<BooleanClause> clauses = new ArrayList<>();
boolean adjustPureNegative = true;
final List<QueryBuilder> mustClauses = new ArrayList<>();
final List<QueryBuilder> mustNotClauses = new ArrayList<>();
final List<QueryBuilder> shouldClauses = new ArrayList<>();
final List<QueryBuilder> filterClauses = new ArrayList<>();
String queryName = null;
String currentFieldName = null;
XContentParser.Token token;
QueryBuilder query;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
@ -74,69 +70,47 @@ public class BoolQueryParser implements QueryParser {
} else if (token == XContentParser.Token.START_OBJECT) {
switch (currentFieldName) {
case "must":
Query query = parseContext.parseInnerQuery();
if (query != null) {
clauses.add(new BooleanClause(query, BooleanClause.Occur.MUST));
}
query = parseContext.parseInnerQueryBuilder();
mustClauses.add(query);
break;
case "should":
query = parseContext.parseInnerQuery();
if (query != null) {
clauses.add(new BooleanClause(query, BooleanClause.Occur.SHOULD));
if (parseContext.isFilter() && minimumShouldMatch == null) {
minimumShouldMatch = "1";
}
}
query = parseContext.parseInnerQueryBuilder();
shouldClauses.add(query);
break;
case "filter":
query = parseContext.parseInnerFilter();
if (query != null) {
clauses.add(new BooleanClause(query, BooleanClause.Occur.FILTER));
}
query = parseContext.parseInnerQueryBuilder();
filterClauses.add(query);
break;
case "must_not":
case "mustNot":
query = parseContext.parseInnerFilter();
if (query != null) {
clauses.add(new BooleanClause(query, BooleanClause.Occur.MUST_NOT));
}
query = parseContext.parseInnerQueryBuilder();
mustNotClauses.add(query);
break;
default:
throw new ParsingException(parseContext, "[bool] query does not support [" + currentFieldName + "]");
throw new ParsingException(parser.getTokenLocation(), "[bool] query does not support [" + currentFieldName + "]");
}
} else if (token == XContentParser.Token.START_ARRAY) {
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
switch (currentFieldName) {
case "must":
Query query = parseContext.parseInnerQuery();
if (query != null) {
clauses.add(new BooleanClause(query, BooleanClause.Occur.MUST));
}
query = parseContext.parseInnerQueryBuilder();
mustClauses.add(query);
break;
case "should":
query = parseContext.parseInnerQuery();
if (query != null) {
clauses.add(new BooleanClause(query, BooleanClause.Occur.SHOULD));
if (parseContext.isFilter() && minimumShouldMatch == null) {
minimumShouldMatch = "1";
}
}
query = parseContext.parseInnerQueryBuilder();
shouldClauses.add(query);
break;
case "filter":
query = parseContext.parseInnerFilter();
if (query != null) {
clauses.add(new BooleanClause(query, BooleanClause.Occur.FILTER));
}
query = parseContext.parseInnerQueryBuilder();
filterClauses.add(query);
break;
case "must_not":
case "mustNot":
query = parseContext.parseInnerFilter();
if (query != null) {
clauses.add(new BooleanClause(query, BooleanClause.Occur.MUST_NOT));
}
query = parseContext.parseInnerQueryBuilder();
mustNotClauses.add(query);
break;
default:
throw new ParsingException(parseContext, "bool query does not support [" + currentFieldName + "]");
throw new ParsingException(parser.getTokenLocation(), "bool query does not support [" + currentFieldName + "]");
}
}
} else if (token.isValue()) {
@ -153,27 +127,33 @@ public class BoolQueryParser implements QueryParser {
} else if ("_name".equals(currentFieldName)) {
queryName = parser.text();
} else {
throw new ParsingException(parseContext, "[bool] query does not support [" + currentFieldName + "]");
throw new ParsingException(parser.getTokenLocation(), "[bool] query does not support [" + currentFieldName + "]");
}
}
}
BoolQueryBuilder boolQuery = new BoolQueryBuilder();
for (QueryBuilder queryBuilder : mustClauses) {
boolQuery.must(queryBuilder);
}
for (QueryBuilder queryBuilder : mustNotClauses) {
boolQuery.mustNot(queryBuilder);
}
for (QueryBuilder queryBuilder : shouldClauses) {
boolQuery.should(queryBuilder);
}
for (QueryBuilder queryBuilder : filterClauses) {
boolQuery.filter(queryBuilder);
}
boolQuery.boost(boost);
boolQuery.disableCoord(disableCoord);
boolQuery.adjustPureNegative(adjustPureNegative);
boolQuery.minimumNumberShouldMatch(minimumShouldMatch);
boolQuery.queryName(queryName);
return boolQuery;
}
if (clauses.isEmpty()) {
return new MatchAllDocsQuery();
}
BooleanQuery.Builder booleanQueryBuilder = new BooleanQuery.Builder();
booleanQueryBuilder.setDisableCoord(disableCoord);
for (BooleanClause clause : clauses) {
booleanQueryBuilder.add(clause);
}
BooleanQuery booleanQuery = booleanQueryBuilder.build();
booleanQuery.setBoost(boost);
booleanQuery = Queries.applyMinimumShouldMatch(booleanQuery, minimumShouldMatch);
Query query = adjustPureNegative ? fixNegativeQueryIfNeeded(booleanQuery) : booleanQuery;
if (queryName != null) {
parseContext.addNamedQuery(queryName, query);
}
return query;
@Override
public BoolQueryBuilder getBuilderPrototype() {
return BoolQueryBuilder.PROTOTYPE;
}
}

View File

@ -1,33 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.query;
/**
* Query builder which allow setting some boost
*/
public interface BoostableQueryBuilder<B extends BoostableQueryBuilder<B>> {
/**
* Sets the boost for this query. Documents matching this query will (in addition to the normal
* weightings) have their score multiplied by the boost provided.
*/
B boost(float boost);
}

View File

@ -19,9 +19,14 @@
package org.elasticsearch.index.query;
import org.apache.lucene.search.Query;
import org.apache.lucene.queries.BoostingQuery;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException;
import java.util.Objects;
/**
* The BoostingQuery class can be used to effectively demote results that match a given query.
@ -35,63 +40,122 @@ import java.io.IOException;
* multiplied by the supplied "boost" parameter, so this should be less than 1 to achieve a
* demoting effect
*/
public class BoostingQueryBuilder extends QueryBuilder implements BoostableQueryBuilder<BoostingQueryBuilder> {
public class BoostingQueryBuilder extends AbstractQueryBuilder<BoostingQueryBuilder> {
private QueryBuilder positiveQuery;
public static final String NAME = "boosting";
private QueryBuilder negativeQuery;
private final QueryBuilder positiveQuery;
private final QueryBuilder negativeQuery;
private float negativeBoost = -1;
private float boost = -1;
static final BoostingQueryBuilder PROTOTYPE = new BoostingQueryBuilder(EmptyQueryBuilder.PROTOTYPE, EmptyQueryBuilder.PROTOTYPE);
public BoostingQueryBuilder() {
}
public BoostingQueryBuilder positive(QueryBuilder positiveQuery) {
/**
* Create a new {@link BoostingQueryBuilder}
*
* @param positiveQuery the positive query for this boosting query.
* @param negativeQuery the negative query for this boosting query.
*/
public BoostingQueryBuilder(QueryBuilder positiveQuery, QueryBuilder negativeQuery) {
if (positiveQuery == null) {
throw new IllegalArgumentException("inner clause [positive] cannot be null.");
}
if (negativeQuery == null) {
throw new IllegalArgumentException("inner clause [negative] cannot be null.");
}
this.positiveQuery = positiveQuery;
return this;
}
public BoostingQueryBuilder negative(QueryBuilder negativeQuery) {
this.negativeQuery = negativeQuery;
return this;
}
/**
* Get the positive query for this boosting query.
*/
public QueryBuilder positiveQuery() {
return this.positiveQuery;
}
/**
* Get the negative query for this boosting query.
*/
public QueryBuilder negativeQuery() {
return this.negativeQuery;
}
/**
* Set the negative boost factor.
*/
public BoostingQueryBuilder negativeBoost(float negativeBoost) {
if (negativeBoost < 0) {
throw new IllegalArgumentException("query requires negativeBoost to be set to positive value");
}
this.negativeBoost = negativeBoost;
return this;
}
@Override
public BoostingQueryBuilder boost(float boost) {
this.boost = boost;
return this;
/**
* Get the negative boost factor.
*/
public float negativeBoost() {
return this.negativeBoost;
}
@Override
protected void doXContent(XContentBuilder builder, Params params) throws IOException {
if (positiveQuery == null) {
throw new IllegalArgumentException("boosting query requires positive query to be set");
}
if (negativeQuery == null) {
throw new IllegalArgumentException("boosting query requires negative query to be set");
}
if (negativeBoost == -1) {
throw new IllegalArgumentException("boosting query requires negativeBoost to be set");
}
builder.startObject(BoostingQueryParser.NAME);
builder.startObject(NAME);
builder.field("positive");
positiveQuery.toXContent(builder, params);
builder.field("negative");
negativeQuery.toXContent(builder, params);
builder.field("negative_boost", negativeBoost);
if (boost != -1) {
builder.field("boost", boost);
}
printBoostAndQueryName(builder);
builder.endObject();
}
@Override
public String getWriteableName() {
return NAME;
}
@Override
protected Query doToQuery(QueryShardContext context) throws IOException {
Query positive = positiveQuery.toQuery(context);
Query negative = negativeQuery.toQuery(context);
// make upstream queries ignore this query by returning `null`
// if either inner query builder returns null
if (positive == null || negative == null) {
return null;
}
return new BoostingQuery(positive, negative, negativeBoost);
}
@Override
protected int doHashCode() {
return Objects.hash(negativeBoost, positiveQuery, negativeQuery);
}
@Override
protected boolean doEquals(BoostingQueryBuilder other) {
return Objects.equals(negativeBoost, other.negativeBoost) &&
Objects.equals(positiveQuery, other.positiveQuery) &&
Objects.equals(negativeQuery, other.negativeQuery);
}
@Override
protected BoostingQueryBuilder doReadFrom(StreamInput in) throws IOException {
QueryBuilder positiveQuery = in.readQuery();
QueryBuilder negativeQuery = in.readQuery();
BoostingQueryBuilder boostingQuery = new BoostingQueryBuilder(positiveQuery, negativeQuery);
boostingQuery.negativeBoost = in.readFloat();
return boostingQuery;
}
@Override
protected void doWriteTo(StreamOutput out) throws IOException {
out.writeQuery(positiveQuery);
out.writeQuery(negativeQuery);
out.writeFloat(negativeBoost);
}
}
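A minimal usage sketch of the rewritten builder (not part of this commit); QueryBuilders.termQuery is assumed here as the usual factory for inner queries, and the field and term values are illustrative.

// Demote documents mentioning "deprecated" without excluding them entirely.
BoostingQueryBuilder boosting = new BoostingQueryBuilder(
        QueryBuilders.termQuery("body", "elasticsearch"),  // positive clause, scored normally
        QueryBuilders.termQuery("body", "deprecated"))     // negative clause, demoted
        .negativeBoost(0.2f);                              // must be >= 0; values < 1 demote matches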

View File

@ -19,40 +19,32 @@
package org.elasticsearch.index.query;
import org.apache.lucene.queries.BoostingQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
/**
*
* Parser for boosting query
*/
public class BoostingQueryParser implements QueryParser {
public static final String NAME = "boosting";
@Inject
public BoostingQueryParser() {
}
public class BoostingQueryParser implements QueryParser<BoostingQueryBuilder> {
@Override
public String[] names() {
return new String[]{NAME};
return new String[]{BoostingQueryBuilder.NAME};
}
@Override
public Query parse(QueryParseContext parseContext) throws IOException, ParsingException {
public BoostingQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException {
XContentParser parser = parseContext.parser();
Query positiveQuery = null;
QueryBuilder positiveQuery = null;
boolean positiveQueryFound = false;
Query negativeQuery = null;
QueryBuilder negativeQuery = null;
boolean negativeQueryFound = false;
float boost = -1;
float boost = AbstractQueryBuilder.DEFAULT_BOOST;
float negativeBoost = -1;
String queryName = null;
String currentFieldName = null;
XContentParser.Token token;
@ -61,44 +53,46 @@ public class BoostingQueryParser implements QueryParser {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_OBJECT) {
if ("positive".equals(currentFieldName)) {
positiveQuery = parseContext.parseInnerQuery();
positiveQuery = parseContext.parseInnerQueryBuilder();
positiveQueryFound = true;
} else if ("negative".equals(currentFieldName)) {
negativeQuery = parseContext.parseInnerQuery();
negativeQuery = parseContext.parseInnerQueryBuilder();
negativeQueryFound = true;
} else {
throw new ParsingException(parseContext, "[boosting] query does not support [" + currentFieldName + "]");
throw new ParsingException(parser.getTokenLocation(), "[boosting] query does not support [" + currentFieldName + "]");
}
} else if (token.isValue()) {
if ("negative_boost".equals(currentFieldName) || "negativeBoost".equals(currentFieldName)) {
negativeBoost = parser.floatValue();
} else if ("_name".equals(currentFieldName)) {
queryName = parser.text();
} else if ("boost".equals(currentFieldName)) {
boost = parser.floatValue();
} else {
throw new ParsingException(parseContext, "[boosting] query does not support [" + currentFieldName + "]");
throw new ParsingException(parser.getTokenLocation(), "[boosting] query does not support [" + currentFieldName + "]");
}
}
}
if (positiveQuery == null && !positiveQueryFound) {
throw new ParsingException(parseContext, "[boosting] query requires 'positive' query to be set'");
if (!positiveQueryFound) {
throw new ParsingException(parser.getTokenLocation(), "[boosting] query requires 'positive' query to be set'");
}
if (negativeQuery == null && !negativeQueryFound) {
throw new ParsingException(parseContext, "[boosting] query requires 'negative' query to be set'");
if (!negativeQueryFound) {
throw new ParsingException(parser.getTokenLocation(), "[boosting] query requires 'negative' query to be set'");
}
if (negativeBoost == -1) {
throw new ParsingException(parseContext, "[boosting] query requires 'negative_boost' to be set'");
if (negativeBoost < 0) {
throw new ParsingException(parser.getTokenLocation(), "[boosting] query requires 'negative_boost' to be set to be a positive value'");
}
// parsers returned null
if (positiveQuery == null || negativeQuery == null) {
return null;
}
BoostingQuery boostingQuery = new BoostingQuery(positiveQuery, negativeQuery, negativeBoost);
if (boost != -1) {
boostingQuery.setBoost(boost);
}
BoostingQueryBuilder boostingQuery = new BoostingQueryBuilder(positiveQuery, negativeQuery);
boostingQuery.negativeBoost(negativeBoost);
boostingQuery.boost(boost);
boostingQuery.queryName(queryName);
return boostingQuery;
}
@Override
public BoostingQueryBuilder getBuilderPrototype() {
return BoostingQueryBuilder.PROTOTYPE;
}
}
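For reference, a sketch of a request body the parser above accepts, written as a Java string; the keys come straight from the parsing branches (positive, negative, negative_boost, plus the generic boost and _name), while the inner term queries are illustrative.

// Example input for fromXContent (illustration only).
String boostingJson =
          "{\n"
        + "  \"boosting\" : {\n"
        + "    \"positive\" : { \"term\" : { \"body\" : \"elasticsearch\" } },\n"
        + "    \"negative\" : { \"term\" : { \"body\" : \"deprecated\" } },\n"
        + "    \"negative_boost\" : 0.2\n"
        + "  }\n"
        + "}";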

View File

@ -19,12 +19,24 @@
package org.elasticsearch.index.query;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.index.Term;
import org.apache.lucene.queries.ExtendedCommonTermsQuery;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.similarities.Similarity;
import org.apache.lucene.util.BytesRefBuilder;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.mapper.MappedFieldType;
import java.io.IOException;
import java.util.Objects;
/**
* CommonTermsQuery query is a query that executes high-frequency terms in a
@ -41,46 +53,58 @@ import java.io.IOException;
* low-frequency terms are matched such that this query can improve query
* execution times significantly if applicable.
*/
public class CommonTermsQueryBuilder extends QueryBuilder implements BoostableQueryBuilder<CommonTermsQueryBuilder> {
public class CommonTermsQueryBuilder extends AbstractQueryBuilder<CommonTermsQueryBuilder> {
public static enum Operator {
OR, AND
}
public static final String NAME = "common";
private final String name;
public static final float DEFAULT_CUTOFF_FREQ = 0.01f;
public static final Operator DEFAULT_HIGH_FREQ_OCCUR = Operator.OR;
public static final Operator DEFAULT_LOW_FREQ_OCCUR = Operator.OR;
public static final boolean DEFAULT_DISABLE_COORD = true;
private final String fieldName;
private final Object text;
private Operator highFreqOperator = null;
private Operator highFreqOperator = DEFAULT_HIGH_FREQ_OCCUR;
private Operator lowFreqOperator = null;
private Operator lowFreqOperator = DEFAULT_LOW_FREQ_OCCUR;
private String analyzer = null;
private Float boost = null;
private String lowFreqMinimumShouldMatch = null;
private String highFreqMinimumShouldMatch = null;
private Boolean disableCoord = null;
private boolean disableCoord = DEFAULT_DISABLE_COORD;
private Float cutoffFrequency = null;
private float cutoffFrequency = DEFAULT_CUTOFF_FREQ;
private String queryName;
static final CommonTermsQueryBuilder PROTOTYPE = new CommonTermsQueryBuilder("field", "text");
/**
* Constructs a new common terms query.
*/
public CommonTermsQueryBuilder(String name, Object text) {
if (name == null) {
throw new IllegalArgumentException("Field name must not be null");
public CommonTermsQueryBuilder(String fieldName, Object text) {
if (Strings.isEmpty(fieldName)) {
throw new IllegalArgumentException("field name is null or empty");
}
if (text == null) {
throw new IllegalArgumentException("Query must not be null");
throw new IllegalArgumentException("text cannot be null.");
}
this.fieldName = fieldName;
this.text = text;
this.name = name;
}
public String fieldName() {
return this.fieldName;
}
public Object value() {
return this.text;
}
/**
@ -89,19 +113,27 @@ public class CommonTermsQueryBuilder extends QueryBuilder implements BoostableQu
* <tt>AND</tt>.
*/
public CommonTermsQueryBuilder highFreqOperator(Operator operator) {
this.highFreqOperator = operator;
this.highFreqOperator = (operator == null) ? DEFAULT_HIGH_FREQ_OCCUR : operator;
return this;
}
public Operator highFreqOperator() {
return highFreqOperator;
}
/**
* Sets the operator to use for terms with a low document frequency (less
* than {@link #cutoffFrequency(float)}). Defaults to <tt>OR</tt>.
*/
public CommonTermsQueryBuilder lowFreqOperator(Operator operator) {
this.lowFreqOperator = operator;
this.lowFreqOperator = (operator == null) ? DEFAULT_LOW_FREQ_OCCUR : operator;
return this;
}
public Operator lowFreqOperator() {
return lowFreqOperator;
}
/**
* Explicitly set the analyzer to use. Defaults to use explicit mapping
* config for the field, or, if not set, the default search analyzer.
@ -111,13 +143,8 @@ public class CommonTermsQueryBuilder extends QueryBuilder implements BoostableQu
return this;
}
/**
* Set the boost to apply to the query.
*/
@Override
public CommonTermsQueryBuilder boost(float boost) {
this.boost = boost;
return this;
public String analyzer() {
return this.analyzer;
}
/**
@ -125,13 +152,17 @@ public class CommonTermsQueryBuilder extends QueryBuilder implements BoostableQu
* in [0..1] (or absolute number &gt;=1) representing the maximum threshold of
* a term's document frequency for it to be considered a low-frequency term.
* Defaults to
* <tt>{@value CommonTermsQueryParser#DEFAULT_MAX_TERM_DOC_FREQ}</tt>
* <tt>{@value #DEFAULT_CUTOFF_FREQ}</tt>
*/
public CommonTermsQueryBuilder cutoffFrequency(float cutoffFrequency) {
this.cutoffFrequency = cutoffFrequency;
return this;
}
public float cutoffFrequency() {
return this.cutoffFrequency;
}
/**
* Sets the minimum number of high-frequency query terms that need to match in order to
* produce a hit when there are no low-frequency terms.
@ -141,6 +172,10 @@ public class CommonTermsQueryBuilder extends QueryBuilder implements BoostableQu
return this;
}
public String highFreqMinimumShouldMatch() {
return this.highFreqMinimumShouldMatch;
}
/**
* Sets the minimum number of low-frequency query terms that need to match in order to
* produce a hit.
@ -149,44 +184,32 @@ public class CommonTermsQueryBuilder extends QueryBuilder implements BoostableQu
this.lowFreqMinimumShouldMatch = lowFreqMinimumShouldMatch;
return this;
}
public String lowFreqMinimumShouldMatch() {
return this.lowFreqMinimumShouldMatch;
}
public CommonTermsQueryBuilder disableCoord(boolean disableCoord) {
this.disableCoord = disableCoord;
return this;
}
/**
* Sets the query name for the filter that can be used when searching for matched_filters per hit.
*/
public CommonTermsQueryBuilder queryName(String queryName) {
this.queryName = queryName;
return this;
public boolean disableCoord() {
return this.disableCoord;
}
@Override
public void doXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(CommonTermsQueryParser.NAME);
builder.startObject(name);
protected void doXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(NAME);
builder.startObject(fieldName);
builder.field("query", text);
if (disableCoord != null) {
builder.field("disable_coord", disableCoord);
}
if (highFreqOperator != null) {
builder.field("high_freq_operator", highFreqOperator.toString());
}
if (lowFreqOperator != null) {
builder.field("low_freq_operator", lowFreqOperator.toString());
}
builder.field("disable_coord", disableCoord);
builder.field("high_freq_operator", highFreqOperator.toString());
builder.field("low_freq_operator", lowFreqOperator.toString());
if (analyzer != null) {
builder.field("analyzer", analyzer);
}
if (boost != null) {
builder.field("boost", boost);
}
if (cutoffFrequency != null) {
builder.field("cutoff_frequency", cutoffFrequency);
}
builder.field("cutoff_frequency", cutoffFrequency);
if (lowFreqMinimumShouldMatch != null || highFreqMinimumShouldMatch != null) {
builder.startObject("minimum_should_match");
if (lowFreqMinimumShouldMatch != null) {
@ -197,11 +220,113 @@ public class CommonTermsQueryBuilder extends QueryBuilder implements BoostableQu
}
builder.endObject();
}
if (queryName != null) {
builder.field("_name", queryName);
}
printBoostAndQueryName(builder);
builder.endObject();
builder.endObject();
}
@Override
public String getWriteableName() {
return NAME;
}
@Override
protected Query doToQuery(QueryShardContext context) throws IOException {
String field;
MappedFieldType fieldType = context.fieldMapper(fieldName);
if (fieldType != null) {
field = fieldType.names().indexName();
} else {
field = fieldName;
}
Analyzer analyzerObj;
if (analyzer == null) {
if (fieldType != null) {
analyzerObj = context.getSearchAnalyzer(fieldType);
} else {
analyzerObj = context.mapperService().searchAnalyzer();
}
} else {
analyzerObj = context.mapperService().analysisService().analyzer(analyzer);
if (analyzerObj == null) {
throw new QueryShardException(context, "[common] analyzer [" + analyzer + "] not found");
}
}
Occur highFreqOccur = highFreqOperator.toBooleanClauseOccur();
Occur lowFreqOccur = lowFreqOperator.toBooleanClauseOccur();
ExtendedCommonTermsQuery commonsQuery = new ExtendedCommonTermsQuery(highFreqOccur, lowFreqOccur, cutoffFrequency, disableCoord, fieldType);
return parseQueryString(commonsQuery, text, field, analyzerObj, lowFreqMinimumShouldMatch, highFreqMinimumShouldMatch);
}
static Query parseQueryString(ExtendedCommonTermsQuery query, Object queryString, String field, Analyzer analyzer,
String lowFreqMinimumShouldMatch, String highFreqMinimumShouldMatch) throws IOException {
// Logic similar to QueryParser#getFieldQuery
int count = 0;
try (TokenStream source = analyzer.tokenStream(field, queryString.toString())) {
source.reset();
CharTermAttribute termAtt = source.addAttribute(CharTermAttribute.class);
BytesRefBuilder builder = new BytesRefBuilder();
while (source.incrementToken()) {
// encode the analyzed term as UTF-8 bytes
builder.copyChars(termAtt);
query.add(new Term(field, builder.toBytesRef()));
count++;
}
}
if (count == 0) {
return null;
}
query.setLowFreqMinimumNumberShouldMatch(lowFreqMinimumShouldMatch);
query.setHighFreqMinimumNumberShouldMatch(highFreqMinimumShouldMatch);
return query;
}
@Override
protected CommonTermsQueryBuilder doReadFrom(StreamInput in) throws IOException {
CommonTermsQueryBuilder commonTermsQueryBuilder = new CommonTermsQueryBuilder(in.readString(), in.readGenericValue());
commonTermsQueryBuilder.highFreqOperator = Operator.readOperatorFrom(in);
commonTermsQueryBuilder.lowFreqOperator = Operator.readOperatorFrom(in);
commonTermsQueryBuilder.analyzer = in.readOptionalString();
commonTermsQueryBuilder.lowFreqMinimumShouldMatch = in.readOptionalString();
commonTermsQueryBuilder.highFreqMinimumShouldMatch = in.readOptionalString();
commonTermsQueryBuilder.disableCoord = in.readBoolean();
commonTermsQueryBuilder.cutoffFrequency = in.readFloat();
return commonTermsQueryBuilder;
}
@Override
protected void doWriteTo(StreamOutput out) throws IOException {
out.writeString(this.fieldName);
out.writeGenericValue(this.text);
highFreqOperator.writeTo(out);
lowFreqOperator.writeTo(out);
out.writeOptionalString(analyzer);
out.writeOptionalString(lowFreqMinimumShouldMatch);
out.writeOptionalString(highFreqMinimumShouldMatch);
out.writeBoolean(disableCoord);
out.writeFloat(cutoffFrequency);
}
@Override
protected int doHashCode() {
return Objects.hash(fieldName, text, highFreqOperator, lowFreqOperator, analyzer,
lowFreqMinimumShouldMatch, highFreqMinimumShouldMatch, disableCoord, cutoffFrequency);
}
@Override
protected boolean doEquals(CommonTermsQueryBuilder other) {
return Objects.equals(fieldName, other.fieldName) &&
Objects.equals(text, other.text) &&
Objects.equals(highFreqOperator, other.highFreqOperator) &&
Objects.equals(lowFreqOperator, other.lowFreqOperator) &&
Objects.equals(analyzer, other.analyzer) &&
Objects.equals(lowFreqMinimumShouldMatch, other.lowFreqMinimumShouldMatch) &&
Objects.equals(highFreqMinimumShouldMatch, other.highFreqMinimumShouldMatch) &&
Objects.equals(disableCoord, other.disableCoord) &&
Objects.equals(cutoffFrequency, other.cutoffFrequency);
}
}
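A construction sketch (not part of this diff) using only the setters visible above; Operator is the shared operator enum the builder serializes via Operator.readOperatorFrom/writeTo, and the field and text are illustrative.

// Low-frequency terms must all match; high-frequency terms only refine scoring.
CommonTermsQueryBuilder common = new CommonTermsQueryBuilder("body", "the quick brown fox")
        .cutoffFrequency(0.001f)        // document-frequency threshold between low and high
        .lowFreqOperator(Operator.AND)  // every low-frequency term must match
        .highFreqOperator(Operator.OR); // high-frequency terms only influence the score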

View File

@ -19,64 +19,38 @@
package org.elasticsearch.index.query;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.index.Term;
import org.apache.lucene.queries.ExtendedCommonTermsQuery;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRefBuilder;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.MappedFieldType;
import java.io.IOException;
/**
*
* Parser for common terms query
*/
public class CommonTermsQueryParser implements QueryParser {
public static final String NAME = "common";
static final float DEFAULT_MAX_TERM_DOC_FREQ = 0.01f;
static final Occur DEFAULT_HIGH_FREQ_OCCUR = Occur.SHOULD;
static final Occur DEFAULT_LOW_FREQ_OCCUR = Occur.SHOULD;
static final boolean DEFAULT_DISABLE_COORD = true;
@Inject
public CommonTermsQueryParser() {
}
public class CommonTermsQueryParser implements QueryParser<CommonTermsQueryBuilder> {
@Override
public String[] names() {
return new String[] { NAME };
return new String[] { CommonTermsQueryBuilder.NAME };
}
@Override
public Query parse(QueryParseContext parseContext) throws IOException, ParsingException {
public CommonTermsQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException {
XContentParser parser = parseContext.parser();
XContentParser.Token token = parser.nextToken();
if (token != XContentParser.Token.FIELD_NAME) {
throw new ParsingException(parseContext, "[common] query malformed, no field");
throw new ParsingException(parser.getTokenLocation(), "[common] query malformed, no field");
}
String fieldName = parser.currentName();
Object value = null;
float boost = 1.0f;
String queryAnalyzer = null;
Object text = null;
float boost = AbstractQueryBuilder.DEFAULT_BOOST;
String analyzer = null;
String lowFreqMinimumShouldMatch = null;
String highFreqMinimumShouldMatch = null;
boolean disableCoord = DEFAULT_DISABLE_COORD;
Occur highFreqOccur = DEFAULT_HIGH_FREQ_OCCUR;
Occur lowFreqOccur = DEFAULT_LOW_FREQ_OCCUR;
float maxTermFrequency = DEFAULT_MAX_TERM_DOC_FREQ;
boolean disableCoord = CommonTermsQueryBuilder.DEFAULT_DISABLE_COORD;
Operator highFreqOperator = CommonTermsQueryBuilder.DEFAULT_HIGH_FREQ_OCCUR;
Operator lowFreqOperator = CommonTermsQueryBuilder.DEFAULT_LOW_FREQ_OCCUR;
float cutoffFrequency = CommonTermsQueryBuilder.DEFAULT_CUTOFF_FREQ;
String queryName = null;
token = parser.nextToken();
if (token == XContentParser.Token.START_OBJECT) {
@ -96,130 +70,66 @@ public class CommonTermsQueryParser implements QueryParser {
} else if ("high_freq".equals(innerFieldName) || "highFreq".equals(innerFieldName)) {
highFreqMinimumShouldMatch = parser.text();
} else {
throw new ParsingException(parseContext, "[common] query does not support [" + innerFieldName
throw new ParsingException(parser.getTokenLocation(), "[common] query does not support [" + innerFieldName
+ "] for [" + currentFieldName + "]");
}
}
}
} else {
throw new ParsingException(parseContext, "[common] query does not support [" + currentFieldName + "]");
throw new ParsingException(parser.getTokenLocation(), "[common] query does not support [" + currentFieldName + "]");
}
} else if (token.isValue()) {
if ("query".equals(currentFieldName)) {
value = parser.objectText();
text = parser.objectText();
} else if ("analyzer".equals(currentFieldName)) {
String analyzer = parser.text();
if (parseContext.analysisService().analyzer(analyzer) == null) {
throw new ParsingException(parseContext, "[common] analyzer [" + parser.text() + "] not found");
}
queryAnalyzer = analyzer;
analyzer = parser.text();
} else if ("disable_coord".equals(currentFieldName) || "disableCoord".equals(currentFieldName)) {
disableCoord = parser.booleanValue();
} else if ("boost".equals(currentFieldName)) {
boost = parser.floatValue();
} else if ("high_freq_operator".equals(currentFieldName) || "highFreqOperator".equals(currentFieldName)) {
String op = parser.text();
if ("or".equalsIgnoreCase(op)) {
highFreqOccur = BooleanClause.Occur.SHOULD;
} else if ("and".equalsIgnoreCase(op)) {
highFreqOccur = BooleanClause.Occur.MUST;
} else {
throw new ParsingException(parseContext,
"[common] query requires operator to be either 'and' or 'or', not [" + op + "]");
}
highFreqOperator = Operator.fromString(parser.text());
} else if ("low_freq_operator".equals(currentFieldName) || "lowFreqOperator".equals(currentFieldName)) {
String op = parser.text();
if ("or".equalsIgnoreCase(op)) {
lowFreqOccur = BooleanClause.Occur.SHOULD;
} else if ("and".equalsIgnoreCase(op)) {
lowFreqOccur = BooleanClause.Occur.MUST;
} else {
throw new ParsingException(parseContext,
"[common] query requires operator to be either 'and' or 'or', not [" + op + "]");
}
lowFreqOperator = Operator.fromString(parser.text());
} else if ("minimum_should_match".equals(currentFieldName) || "minimumShouldMatch".equals(currentFieldName)) {
lowFreqMinimumShouldMatch = parser.text();
} else if ("cutoff_frequency".equals(currentFieldName)) {
maxTermFrequency = parser.floatValue();
cutoffFrequency = parser.floatValue();
} else if ("_name".equals(currentFieldName)) {
queryName = parser.text();
} else {
throw new ParsingException(parseContext, "[common] query does not support [" + currentFieldName + "]");
throw new ParsingException(parser.getTokenLocation(), "[common] query does not support [" + currentFieldName + "]");
}
}
}
parser.nextToken();
} else {
value = parser.objectText();
text = parser.objectText();
// move to the next token
token = parser.nextToken();
if (token != XContentParser.Token.END_OBJECT) {
throw new ParsingException(
parseContext,
throw new ParsingException(parser.getTokenLocation(),
"[common] query parsed in simplified form, with direct field name, but included more options than just the field name, possibly use its 'options' form, with 'query' element?");
}
}
if (value == null) {
throw new ParsingException(parseContext, "No text specified for text query");
if (text == null) {
throw new ParsingException(parser.getTokenLocation(), "No text specified for text query");
}
String field;
MappedFieldType fieldType = parseContext.fieldMapper(fieldName);
if (fieldType != null) {
field = fieldType.names().indexName();
} else {
field = fieldName;
}
Analyzer analyzer = null;
if (queryAnalyzer == null) {
if (fieldType != null) {
analyzer = fieldType.searchAnalyzer();
}
if (analyzer == null && fieldType != null) {
analyzer = parseContext.getSearchAnalyzer(fieldType);
}
if (analyzer == null) {
analyzer = parseContext.mapperService().searchAnalyzer();
}
} else {
analyzer = parseContext.mapperService().analysisService().analyzer(queryAnalyzer);
if (analyzer == null) {
throw new IllegalArgumentException("No analyzer found for [" + queryAnalyzer + "]");
}
}
ExtendedCommonTermsQuery commonsQuery = new ExtendedCommonTermsQuery(highFreqOccur, lowFreqOccur, maxTermFrequency, disableCoord, fieldType);
commonsQuery.setBoost(boost);
Query query = parseQueryString(commonsQuery, value.toString(), field, parseContext, analyzer, lowFreqMinimumShouldMatch, highFreqMinimumShouldMatch);
if (queryName != null) {
parseContext.addNamedQuery(queryName, query);
}
return query;
return new CommonTermsQueryBuilder(fieldName, text)
.lowFreqMinimumShouldMatch(lowFreqMinimumShouldMatch)
.highFreqMinimumShouldMatch(highFreqMinimumShouldMatch)
.analyzer(analyzer)
.highFreqOperator(highFreqOperator)
.lowFreqOperator(lowFreqOperator)
.disableCoord(disableCoord)
.cutoffFrequency(cutoffFrequency)
.boost(boost)
.queryName(queryName);
}
private final Query parseQueryString(ExtendedCommonTermsQuery query, String queryString, String field, QueryParseContext parseContext,
Analyzer analyzer, String lowFreqMinimumShouldMatch, String highFreqMinimumShouldMatch) throws IOException {
// Logic similar to QueryParser#getFieldQuery
int count = 0;
try (TokenStream source = analyzer.tokenStream(field, queryString.toString())) {
source.reset();
CharTermAttribute termAtt = source.addAttribute(CharTermAttribute.class);
BytesRefBuilder builder = new BytesRefBuilder();
while (source.incrementToken()) {
// UTF-8
builder.copyChars(termAtt);
query.add(new Term(field, builder.toBytesRef()));
count++;
}
}
if (count == 0) {
return null;
}
query.setLowFreqMinimumNumberShouldMatch(lowFreqMinimumShouldMatch);
query.setHighFreqMinimumNumberShouldMatch(highFreqMinimumShouldMatch);
return query;
@Override
public CommonTermsQueryBuilder getBuilderPrototype() {
return CommonTermsQueryBuilder.PROTOTYPE;
}
}
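For reference, a sketch of the full "options" form the parser above handles, as a Java string; the simplified form { "common" : { "body" : "query text" } } is also accepted per the else-branch near the end. Key names come from the parsing branches; values are illustrative.

// Example input for fromXContent (illustration only).
String commonJson =
          "{\n"
        + "  \"common\" : {\n"
        + "    \"body\" : {\n"
        + "      \"query\" : \"this is bonsai cool\",\n"
        + "      \"cutoff_frequency\" : 0.001,\n"
        + "      \"low_freq_operator\" : \"and\",\n"
        + "      \"minimum_should_match\" : { \"low_freq\" : \"2\", \"high_freq\" : \"3\" }\n"
        + "    }\n"
        + "  }\n"
        + "}";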

View File

@ -19,6 +19,10 @@
package org.elasticsearch.index.query;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException;
@ -28,41 +32,76 @@ import java.util.Objects;
* A query that wraps a filter and simply returns a constant score equal to the
* query boost for every document in the filter.
*/
public class ConstantScoreQueryBuilder extends QueryBuilder implements BoostableQueryBuilder<ConstantScoreQueryBuilder> {
public class ConstantScoreQueryBuilder extends AbstractQueryBuilder<ConstantScoreQueryBuilder> {
public static final String NAME = "constant_score";
private final QueryBuilder filterBuilder;
private float boost = -1;
static final ConstantScoreQueryBuilder PROTOTYPE = new ConstantScoreQueryBuilder(EmptyQueryBuilder.PROTOTYPE);
/**
* A query that wraps a query and simply returns a constant score equal to the
* A query that wraps another query and simply returns a constant score equal to the
* query boost for every document in the query.
*
* @param filterBuilder The query to wrap in a constant score query
*/
public ConstantScoreQueryBuilder(QueryBuilder filterBuilder) {
this.filterBuilder = Objects.requireNonNull(filterBuilder);
if (filterBuilder == null) {
throw new IllegalArgumentException("inner clause [filter] cannot be null.");
}
this.filterBuilder = filterBuilder;
}
/**
* Sets the boost for this query. Documents matching this query will (in addition to the normal
* weightings) have their score multiplied by the boost provided.
* @return the query that was wrapped in this constant score query
*/
@Override
public ConstantScoreQueryBuilder boost(float boost) {
this.boost = boost;
return this;
public QueryBuilder innerQuery() {
return this.filterBuilder;
}
@Override
protected void doXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(ConstantScoreQueryParser.NAME);
builder.startObject(NAME);
builder.field("filter");
filterBuilder.toXContent(builder, params);
if (boost != -1) {
builder.field("boost", boost);
}
printBoostAndQueryName(builder);
builder.endObject();
}
}
@Override
protected Query doToQuery(QueryShardContext context) throws IOException {
Query innerFilter = filterBuilder.toFilter(context);
if (innerFilter == null) {
// return null so that parent queries (e.g. bool) also ignore this
return null;
}
return new ConstantScoreQuery(innerFilter);
}
@Override
public String getWriteableName() {
return NAME;
}
@Override
protected int doHashCode() {
return Objects.hash(filterBuilder);
}
@Override
protected boolean doEquals(ConstantScoreQueryBuilder other) {
return Objects.equals(filterBuilder, other.filterBuilder);
}
@Override
protected ConstantScoreQueryBuilder doReadFrom(StreamInput in) throws IOException {
QueryBuilder innerFilterBuilder = in.readQuery();
return new ConstantScoreQueryBuilder(innerFilterBuilder);
}
@Override
protected void doWriteTo(StreamOutput out) throws IOException {
out.writeQuery(filterBuilder);
}
}
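A minimal usage sketch (not in this diff); QueryBuilders.termQuery is assumed as the inner query factory.

// Every matching document gets the same score, namely the query boost.
ConstantScoreQueryBuilder constantScore = new ConstantScoreQueryBuilder(
        QueryBuilders.termQuery("status", "active"));
constantScore.boost(2.0f); // boost(...) is inherited from AbstractQueryBuilder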

View File

@ -19,40 +19,33 @@
package org.elasticsearch.index.query;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
/**
*
* Parser for constant_score query
*/
public class ConstantScoreQueryParser implements QueryParser {
public class ConstantScoreQueryParser implements QueryParser<ConstantScoreQueryBuilder> {
public static final String NAME = "constant_score";
private static final ParseField INNER_QUERY_FIELD = new ParseField("filter", "query");
@Inject
public ConstantScoreQueryParser() {
}
@Override
public String[] names() {
return new String[]{NAME, Strings.toCamelCase(NAME)};
return new String[]{ConstantScoreQueryBuilder.NAME, Strings.toCamelCase(ConstantScoreQueryBuilder.NAME)};
}
@Override
public Query parse(QueryParseContext parseContext) throws IOException, ParsingException {
public ConstantScoreQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException {
XContentParser parser = parseContext.parser();
Query filter = null;
QueryBuilder query = null;
boolean queryFound = false;
float boost = 1.0f;
String queryName = null;
float boost = AbstractQueryBuilder.DEFAULT_BOOST;
String currentFieldName = null;
XContentParser.Token token;
@ -63,29 +56,33 @@ public class ConstantScoreQueryParser implements QueryParser {
// skip
} else if (token == XContentParser.Token.START_OBJECT) {
if (parseContext.parseFieldMatcher().match(currentFieldName, INNER_QUERY_FIELD)) {
filter = parseContext.parseInnerFilter();
query = parseContext.parseInnerQueryBuilder();
queryFound = true;
} else {
throw new ParsingException(parseContext, "[constant_score] query does not support [" + currentFieldName + "]");
throw new ParsingException(parser.getTokenLocation(), "[constant_score] query does not support [" + currentFieldName + "]");
}
} else if (token.isValue()) {
if ("boost".equals(currentFieldName)) {
if ("_name".equals(currentFieldName)) {
queryName = parser.text();
} else if ("boost".equals(currentFieldName)) {
boost = parser.floatValue();
} else {
throw new ParsingException(parseContext, "[constant_score] query does not support [" + currentFieldName + "]");
throw new ParsingException(parser.getTokenLocation(), "[constant_score] query does not support [" + currentFieldName + "]");
}
}
}
if (!queryFound) {
throw new ParsingException(parseContext, "[constant_score] requires a 'filter' element");
throw new ParsingException(parser.getTokenLocation(), "[constant_score] requires a 'filter' element");
}
if (filter == null) {
return null;
}
filter = new ConstantScoreQuery(filter);
filter.setBoost(boost);
return filter;
ConstantScoreQueryBuilder constantScoreBuilder = new ConstantScoreQueryBuilder(query);
constantScoreBuilder.boost(boost);
constantScoreBuilder.queryName(queryName);
return constantScoreBuilder;
}
}
@Override
public ConstantScoreQueryBuilder getBuilderPrototype() {
return ConstantScoreQueryBuilder.PROTOTYPE;
}
}
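For reference, a sketch of a request body the parser above accepts, as a Java string; per the ParseField above, the inner clause may be spelled "filter" or the alternative "query".

// Example input for fromXContent (illustration only).
String constantScoreJson =
          "{\n"
        + "  \"constant_score\" : {\n"
        + "    \"filter\" : { \"term\" : { \"status\" : \"active\" } },\n"
        + "    \"boost\" : 2.0\n"
        + "  }\n"
        + "}";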

View File

@ -19,42 +19,51 @@
package org.elasticsearch.index.query;
import org.apache.lucene.search.DisjunctionMaxQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Objects;
/**
* A query that generates the union of documents produced by its sub-queries, and that scores each document
* with the maximum score for that document as produced by any sub-query, plus a tie breaking increment for any
* additional matching sub-queries.
*/
public class DisMaxQueryBuilder extends QueryBuilder implements BoostableQueryBuilder<DisMaxQueryBuilder> {
public class DisMaxQueryBuilder extends AbstractQueryBuilder<DisMaxQueryBuilder> {
private ArrayList<QueryBuilder> queries = new ArrayList<>();
public static final String NAME = "dis_max";
private float boost = -1;
private final ArrayList<QueryBuilder> queries = new ArrayList<>();
private float tieBreaker = -1;
/** Default multiplication factor for breaking ties in document scores.*/
public static float DEFAULT_TIE_BREAKER = 0.0f;
private float tieBreaker = DEFAULT_TIE_BREAKER;
private String queryName;
static final DisMaxQueryBuilder PROTOTYPE = new DisMaxQueryBuilder();
/**
* Add a sub-query to this disjunction.
*/
public DisMaxQueryBuilder add(QueryBuilder queryBuilder) {
if (queryBuilder == null) {
throw new IllegalArgumentException("inner dismax query clause cannot be null");
}
queries.add(queryBuilder);
return this;
}
/**
* Sets the boost for this query. Documents matching this query will (in addition to the normal
* weightings) have their score multiplied by the boost provided.
* @return the current list of sub-queries of this disjunction
*/
@Override
public DisMaxQueryBuilder boost(float boost) {
this.boost = boost;
return this;
public List<QueryBuilder> innerQueries() {
return this.queries;
}
/**
@ -69,30 +78,65 @@ public class DisMaxQueryBuilder extends QueryBuilder implements BoostableQueryBu
}
/**
* Sets the query name for the filter that can be used when searching for matched_filters per hit.
* @return the tie breaker score
* @see DisMaxQueryBuilder#tieBreaker(float)
*/
public DisMaxQueryBuilder queryName(String queryName) {
this.queryName = queryName;
return this;
public float tieBreaker() {
return this.tieBreaker;
}
@Override
protected void doXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(DisMaxQueryParser.NAME);
if (tieBreaker != -1) {
builder.field("tie_breaker", tieBreaker);
}
if (boost != -1) {
builder.field("boost", boost);
}
if (queryName != null) {
builder.field("_name", queryName);
}
builder.startObject(NAME);
builder.field("tie_breaker", tieBreaker);
builder.startArray("queries");
for (QueryBuilder queryBuilder : queries) {
queryBuilder.toXContent(builder, params);
}
builder.endArray();
printBoostAndQueryName(builder);
builder.endObject();
}
}
@Override
protected Query doToQuery(QueryShardContext context) throws IOException {
// return null if there are no queries at all
Collection<Query> luceneQueries = toQueries(queries, context);
if (luceneQueries.isEmpty()) {
return null;
}
return new DisjunctionMaxQuery(luceneQueries, tieBreaker);
}
@Override
protected DisMaxQueryBuilder doReadFrom(StreamInput in) throws IOException {
DisMaxQueryBuilder disMax = new DisMaxQueryBuilder();
List<QueryBuilder> queryBuilders = readQueries(in);
disMax.queries.addAll(queryBuilders);
disMax.tieBreaker = in.readFloat();
return disMax;
}
@Override
protected void doWriteTo(StreamOutput out) throws IOException {
writeQueries(out, queries);
out.writeFloat(tieBreaker);
}
@Override
protected int doHashCode() {
return Objects.hash(queries, tieBreaker);
}
@Override
protected boolean doEquals(DisMaxQueryBuilder other) {
return Objects.equals(queries, other.queries) &&
Objects.equals(tieBreaker, other.tieBreaker);
}
@Override
public String getWriteableName() {
return NAME;
}
}
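A minimal usage sketch (not in this diff); QueryBuilders.termQuery is assumed as the sub-query factory, and the fields are illustrative.

// Score each document by its best-matching sub-query, plus a tie-breaking
// credit for the other sub-queries that also match.
DisMaxQueryBuilder disMax = new DisMaxQueryBuilder()
        .add(QueryBuilders.termQuery("title", "quick"))
        .add(QueryBuilders.termQuery("body", "quick"));
disMax.tieBreaker(0.3f); // 0.0 (the default) keeps the pure max score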

View File

@ -19,11 +19,8 @@
package org.elasticsearch.index.query;
import org.apache.lucene.search.DisjunctionMaxQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
@ -31,29 +28,23 @@ import java.util.ArrayList;
import java.util.List;
/**
*
* Parser for dis_max query
*/
public class DisMaxQueryParser implements QueryParser {
public static final String NAME = "dis_max";
@Inject
public DisMaxQueryParser() {
}
public class DisMaxQueryParser implements QueryParser<DisMaxQueryBuilder> {
@Override
public String[] names() {
return new String[]{NAME, Strings.toCamelCase(NAME)};
return new String[]{DisMaxQueryBuilder.NAME, Strings.toCamelCase(DisMaxQueryBuilder.NAME)};
}
@Override
public Query parse(QueryParseContext parseContext) throws IOException, ParsingException {
public DisMaxQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException {
XContentParser parser = parseContext.parser();
float boost = 1.0f;
float tieBreaker = 0.0f;
float boost = AbstractQueryBuilder.DEFAULT_BOOST;
float tieBreaker = DisMaxQueryBuilder.DEFAULT_TIE_BREAKER;
List<Query> queries = new ArrayList<>();
final List<QueryBuilder> queries = new ArrayList<>();
boolean queriesFound = false;
String queryName = null;
@ -65,25 +56,21 @@ public class DisMaxQueryParser implements QueryParser {
} else if (token == XContentParser.Token.START_OBJECT) {
if ("queries".equals(currentFieldName)) {
queriesFound = true;
Query query = parseContext.parseInnerQuery();
if (query != null) {
queries.add(query);
}
QueryBuilder query = parseContext.parseInnerQueryBuilder();
queries.add(query);
} else {
throw new ParsingException(parseContext, "[dis_max] query does not support [" + currentFieldName + "]");
throw new ParsingException(parser.getTokenLocation(), "[dis_max] query does not support [" + currentFieldName + "]");
}
} else if (token == XContentParser.Token.START_ARRAY) {
if ("queries".equals(currentFieldName)) {
queriesFound = true;
while (token != XContentParser.Token.END_ARRAY) {
Query query = parseContext.parseInnerQuery();
if (query != null) {
queries.add(query);
}
QueryBuilder query = parseContext.parseInnerQueryBuilder();
queries.add(query);
token = parser.nextToken();
}
} else {
throw new ParsingException(parseContext, "[dis_max] query does not support [" + currentFieldName + "]");
throw new ParsingException(parser.getTokenLocation(), "[dis_max] query does not support [" + currentFieldName + "]");
}
} else {
if ("boost".equals(currentFieldName)) {
@ -93,24 +80,27 @@ public class DisMaxQueryParser implements QueryParser {
} else if ("_name".equals(currentFieldName)) {
queryName = parser.text();
} else {
throw new ParsingException(parseContext, "[dis_max] query does not support [" + currentFieldName + "]");
throw new ParsingException(parser.getTokenLocation(), "[dis_max] query does not support [" + currentFieldName + "]");
}
}
}
if (!queriesFound) {
throw new ParsingException(parseContext, "[dis_max] requires 'queries' field");
throw new ParsingException(parser.getTokenLocation(), "[dis_max] requires 'queries' field");
}
if (queries.isEmpty()) {
return null;
DisMaxQueryBuilder disMaxQuery = new DisMaxQueryBuilder();
disMaxQuery.tieBreaker(tieBreaker);
disMaxQuery.queryName(queryName);
disMaxQuery.boost(boost);
for (QueryBuilder query : queries) {
disMaxQuery.add(query);
}
DisjunctionMaxQuery query = new DisjunctionMaxQuery(queries, tieBreaker);
query.setBoost(boost);
if (queryName != null) {
parseContext.addNamedQuery(queryName, query);
}
return query;
return disMaxQuery;
}
}
@Override
public DisMaxQueryBuilder getBuilderPrototype() {
return DisMaxQueryBuilder.PROTOTYPE;
}
}
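For reference, a sketch of a request body the parser above accepts, as a Java string; "queries" may be a single object or an array, per the two branches above.

// Example input for fromXContent (illustration only).
String disMaxJson =
          "{\n"
        + "  \"dis_max\" : {\n"
        + "    \"tie_breaker\" : 0.3,\n"
        + "    \"queries\" : [\n"
        + "      { \"term\" : { \"title\" : \"quick\" } },\n"
        + "      { \"term\" : { \"body\" : \"quick\" } }\n"
        + "    ]\n"
        + "  }\n"
        + "}";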

View File

@ -0,0 +1,111 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.query;
import org.apache.lucene.search.Query;
import org.elasticsearch.action.support.ToXContentToBytes;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentType;
import java.io.IOException;
/**
* A {@link QueryBuilder} that is a stand-in replacement for an empty query clause in the DSL.
* The current DSL allows parsing inner queries / filters like "{ }"; this class provides a
* valid, non-null representation of such clauses that does nothing.
*
* This builder has no corresponding parser and it is not registered under the query name. It is
* intended to be used internally as a stand-in for nested queries that are left empty and should
* be ignored upstream.
*/
public class EmptyQueryBuilder extends ToXContentToBytes implements QueryBuilder<EmptyQueryBuilder> {
public static final String NAME = "empty_query";
/** the one and only empty query builder */
public static final EmptyQueryBuilder PROTOTYPE = new EmptyQueryBuilder();
// prevent instances other than prototype
private EmptyQueryBuilder() {
super(XContentType.JSON);
}
@Override
public String getWriteableName() {
return NAME;
}
@Override
public String getName() {
return getWriteableName();
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.endObject();
return builder;
}
@Override
public Query toQuery(QueryShardContext context) throws IOException {
// empty
return null;
}
@Override
public Query toFilter(QueryShardContext context) throws IOException {
// empty
return null;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
}
@Override
public EmptyQueryBuilder readFrom(StreamInput in) throws IOException {
return EmptyQueryBuilder.PROTOTYPE;
}
@Override
public EmptyQueryBuilder queryName(String queryName) {
//no-op
return this;
}
@Override
public String queryName() {
return null;
}
@Override
public float boost() {
return -1;
}
@Override
public EmptyQueryBuilder boost(float boost) {
//no-op
return this;
}
}

View File

@ -19,38 +19,124 @@
package org.elasticsearch.index.query;
import org.apache.lucene.search.*;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.internal.FieldNamesFieldMapper;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import java.io.IOException;
import java.util.Collection;
import java.util.Objects;
/**
* Constructs a query that only matches documents in which the field has a value.
*/
public class ExistsQueryBuilder extends QueryBuilder {
public class ExistsQueryBuilder extends AbstractQueryBuilder<ExistsQueryBuilder> {
private String name;
public static final String NAME = "exists";
private String queryName;
private final String fieldName;
public ExistsQueryBuilder(String name) {
this.name = name;
static final ExistsQueryBuilder PROTOTYPE = new ExistsQueryBuilder("field");
public ExistsQueryBuilder(String fieldName) {
if (Strings.isEmpty(fieldName)) {
throw new IllegalArgumentException("field name is null or empty");
}
this.fieldName = fieldName;
}
/**
* Sets the query name for the query that can be used when searching for matched_queries per hit.
* @return the field name that has to exist for this query to match
*/
public ExistsQueryBuilder queryName(String queryName) {
this.queryName = queryName;
return this;
public String fieldName() {
return this.fieldName;
}
@Override
protected void doXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(ExistsQueryParser.NAME);
builder.field("field", name);
if (queryName != null) {
builder.field("_name", queryName);
}
builder.startObject(NAME);
builder.field("field", fieldName);
printBoostAndQueryName(builder);
builder.endObject();
}
@Override
protected Query doToQuery(QueryShardContext context) throws IOException {
return newFilter(context, fieldName);
}
public static Query newFilter(QueryShardContext context, String fieldPattern) {
final FieldNamesFieldMapper.FieldNamesFieldType fieldNamesFieldType = (FieldNamesFieldMapper.FieldNamesFieldType)context.mapperService().fullName(FieldNamesFieldMapper.NAME);
if (fieldNamesFieldType == null) {
// can only happen when no types exist, so no docs exist either
return Queries.newMatchNoDocsQuery();
}
ObjectMapper objectMapper = context.getObjectMapper(fieldPattern);
if (objectMapper != null) {
// the field is an object mapper, so widen the pattern to cover all of its sub-fields
fieldPattern = fieldPattern + ".*";
}
Collection<String> fields = context.simpleMatchToIndexNames(fieldPattern);
if (fields.isEmpty()) {
// no fields exist, so we should not match anything
return Queries.newMatchNoDocsQuery();
}
BooleanQuery.Builder boolFilterBuilder = new BooleanQuery.Builder();
for (String field : fields) {
MappedFieldType fieldType = context.fieldMapper(field);
Query filter = null;
if (fieldNamesFieldType.isEnabled()) {
final String f;
if (fieldType != null) {
f = fieldType.names().indexName();
} else {
f = field;
}
filter = fieldNamesFieldType.termQuery(f, context);
}
// if the _field_names field is not indexed, we need the slower range-query fallback
if (filter == null && fieldType != null) {
filter = fieldType.rangeQuery(null, null, true, true);
}
if (filter == null) {
filter = new TermRangeQuery(field, null, null, true, true);
}
boolFilterBuilder.add(filter, BooleanClause.Occur.SHOULD);
}
return new ConstantScoreQuery(boolFilterBuilder.build());
}
@Override
protected int doHashCode() {
return Objects.hash(fieldName);
}
@Override
protected boolean doEquals(ExistsQueryBuilder other) {
return Objects.equals(fieldName, other.fieldName);
}
@Override
protected ExistsQueryBuilder doReadFrom(StreamInput in) throws IOException {
return new ExistsQueryBuilder(in.readString());
}
@Override
protected void doWriteTo(StreamOutput out) throws IOException {
out.writeString(fieldName);
}
@Override
public String getWriteableName() {
return NAME;
}
}
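A minimal usage sketch (not in this diff); the field name is illustrative.

// Match documents that have any value for "user". Per newFilter(...) above, an
// object field is expanded to "user.*", and an unmatched pattern matches nothing.
ExistsQueryBuilder exists = new ExistsQueryBuilder("user");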

View File

@ -19,40 +19,28 @@
package org.elasticsearch.index.query;
import org.apache.lucene.search.*;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.internal.FieldNamesFieldMapper;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import java.io.IOException;
import java.util.Collection;
/**
*
* Parser for exists query
*/
public class ExistsQueryParser implements QueryParser {
public static final String NAME = "exists";
@Inject
public ExistsQueryParser() {
}
public class ExistsQueryParser implements QueryParser<ExistsQueryBuilder> {
@Override
public String[] names() {
return new String[]{NAME};
return new String[]{ExistsQueryBuilder.NAME};
}
@Override
public Query parse(QueryParseContext parseContext) throws IOException, ParsingException {
public ExistsQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException {
XContentParser parser = parseContext.parser();
String fieldPattern = null;
String queryName = null;
float boost = AbstractQueryBuilder.DEFAULT_BOOST;
XContentParser.Token token;
String currentFieldName = null;
@ -64,66 +52,26 @@ public class ExistsQueryParser implements QueryParser {
fieldPattern = parser.text();
} else if ("_name".equals(currentFieldName)) {
queryName = parser.text();
} else if ("boost".equals(currentFieldName)) {
boost = parser.floatValue();
} else {
throw new ParsingException(parseContext, "[exists] query does not support [" + currentFieldName + "]");
throw new ParsingException(parser.getTokenLocation(), "[exists] query does not support [" + currentFieldName + "]");
}
}
}
if (fieldPattern == null) {
throw new ParsingException(parseContext, "exists must be provided with a [field]");
throw new ParsingException(parser.getTokenLocation(), "exists must be provided with a [field]");
}
return newFilter(parseContext, fieldPattern, queryName);
ExistsQueryBuilder builder = new ExistsQueryBuilder(fieldPattern);
builder.queryName(queryName);
builder.boost(boost);
return builder;
}
public static Query newFilter(QueryParseContext parseContext, String fieldPattern, String queryName) {
final FieldNamesFieldMapper.FieldNamesFieldType fieldNamesFieldType = (FieldNamesFieldMapper.FieldNamesFieldType)parseContext.mapperService().fullName(FieldNamesFieldMapper.NAME);
if (fieldNamesFieldType == null) {
// can only happen when no types exist, so no docs exist either
return Queries.newMatchNoDocsQuery();
}
ObjectMapper objectMapper = parseContext.getObjectMapper(fieldPattern);
if (objectMapper != null) {
// the field is an object mapper, so widen the pattern to cover all of its sub-fields
fieldPattern = fieldPattern + ".*";
}
Collection<String> fields = parseContext.simpleMatchToIndexNames(fieldPattern);
if (fields.isEmpty()) {
// no fields exist, so we should not match anything
return Queries.newMatchNoDocsQuery();
}
BooleanQuery.Builder boolFilterBuilder = new BooleanQuery.Builder();
for (String field : fields) {
MappedFieldType fieldType = parseContext.fieldMapper(field);
Query filter = null;
if (fieldNamesFieldType.isEnabled()) {
final String f;
if (fieldType != null) {
f = fieldType.names().indexName();
} else {
f = field;
}
filter = fieldNamesFieldType.termQuery(f, parseContext);
}
// if the _field_names field is not indexed, we need the slower range-query fallback
if (filter == null && fieldType != null) {
filter = fieldType.rangeQuery(null, null, true, true);
}
if (filter == null) {
filter = new TermRangeQuery(field, null, null, true, true);
}
boolFilterBuilder.add(filter, BooleanClause.Occur.SHOULD);
}
BooleanQuery boolFilter = boolFilterBuilder.build();
if (queryName != null) {
parseContext.addNamedQuery(queryName, boolFilter);
}
return new ConstantScoreQuery(boolFilter);
@Override
public ExistsQueryBuilder getBuilderPrototype() {
return ExistsQueryBuilder.PROTOTYPE;
}
}

View File

@ -19,52 +19,106 @@
package org.elasticsearch.index.query;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.spans.FieldMaskingSpanQuery;
import org.apache.lucene.search.spans.SpanQuery;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.mapper.MappedFieldType;
import java.io.IOException;
import java.util.Objects;
public class FieldMaskingSpanQueryBuilder extends SpanQueryBuilder implements BoostableQueryBuilder<FieldMaskingSpanQueryBuilder> {
public class FieldMaskingSpanQueryBuilder extends AbstractQueryBuilder<FieldMaskingSpanQueryBuilder> implements SpanQueryBuilder<FieldMaskingSpanQueryBuilder> {
public static final String NAME = "field_masking_span";
private final SpanQueryBuilder queryBuilder;
private final String field;
private final String fieldName;
private float boost = -1;
static final FieldMaskingSpanQueryBuilder PROTOTYPE = new FieldMaskingSpanQueryBuilder(new SpanTermQueryBuilder("field", "text"), "field");
private String queryName;
public FieldMaskingSpanQueryBuilder(SpanQueryBuilder queryBuilder, String field) {
/**
* Constructs a new {@link FieldMaskingSpanQueryBuilder} given an inner {@link SpanQueryBuilder} for
* a given field
* @param queryBuilder inner {@link SpanQueryBuilder}
* @param fieldName the field name
*/
public FieldMaskingSpanQueryBuilder(SpanQueryBuilder queryBuilder, String fieldName) {
if (Strings.isEmpty(fieldName)) {
throw new IllegalArgumentException("field name is null or empty");
}
if (queryBuilder == null) {
throw new IllegalArgumentException("inner clause [query] cannot be null.");
}
this.queryBuilder = queryBuilder;
this.field = field;
}
@Override
public FieldMaskingSpanQueryBuilder boost(float boost) {
this.boost = boost;
return this;
this.fieldName = fieldName;
}
/**
* Sets the query name for the filter that can be used when searching for matched_filters per hit.
* @return the field name for this query
*/
public FieldMaskingSpanQueryBuilder queryName(String queryName) {
this.queryName = queryName;
return this;
public String fieldName() {
return this.fieldName;
}
/**
* @return the inner {@link QueryBuilder}
*/
public SpanQueryBuilder innerQuery() {
return this.queryBuilder;
}
@Override
protected void doXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(FieldMaskingSpanQueryParser.NAME);
builder.startObject(NAME);
builder.field("query");
queryBuilder.toXContent(builder, params);
builder.field("field", field);
if (boost != -1) {
builder.field("boost", boost);
}
if (queryName != null) {
builder.field("_name", queryName);
}
builder.field("field", fieldName);
printBoostAndQueryName(builder);
builder.endObject();
}
@Override
protected SpanQuery doToQuery(QueryShardContext context) throws IOException {
String fieldInQuery = fieldName;
MappedFieldType fieldType = context.fieldMapper(fieldName);
if (fieldType != null) {
fieldInQuery = fieldType.names().indexName();
}
Query innerQuery = queryBuilder.toQuery(context);
assert innerQuery instanceof SpanQuery;
return new FieldMaskingSpanQuery((SpanQuery)innerQuery, fieldInQuery);
}
@Override
protected FieldMaskingSpanQueryBuilder doReadFrom(StreamInput in) throws IOException {
QueryBuilder innerQueryBuilder = in.readQuery();
return new FieldMaskingSpanQueryBuilder((SpanQueryBuilder) innerQueryBuilder, in.readString());
}
@Override
protected void doWriteTo(StreamOutput out) throws IOException {
out.writeQuery(queryBuilder);
out.writeString(fieldName);
}
@Override
protected int doHashCode() {
return Objects.hash(queryBuilder, fieldName);
}
@Override
protected boolean doEquals(FieldMaskingSpanQueryBuilder other) {
return Objects.equals(queryBuilder, other.queryBuilder) &&
Objects.equals(fieldName, other.fieldName);
}
@Override
public String getWriteableName() {
return NAME;
}
}
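A minimal usage sketch (not in this diff); SpanTermQueryBuilder is the same inner builder the PROTOTYPE above uses, and the field names are illustrative.

// Execute the span query against one field while reporting another field name
// upstream, so it can be combined with span queries on that other field.
FieldMaskingSpanQueryBuilder masked = new FieldMaskingSpanQueryBuilder(
        new SpanTermQueryBuilder("text.stems", "fox"), // span query actually executed
        "text");                                       // field name exposed to enclosing spans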

View File

@ -19,40 +19,28 @@
package org.elasticsearch.index.query;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.spans.FieldMaskingSpanQuery;
import org.apache.lucene.search.spans.SpanQuery;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.MappedFieldType;
import java.io.IOException;
/**
*
* Parser for field_masking_span query
*/
public class FieldMaskingSpanQueryParser implements QueryParser {
public static final String NAME = "field_masking_span";
@Inject
public FieldMaskingSpanQueryParser() {
}
public class FieldMaskingSpanQueryParser implements QueryParser<FieldMaskingSpanQueryBuilder> {
@Override
public String[] names() {
return new String[]{NAME, Strings.toCamelCase(NAME)};
return new String[]{FieldMaskingSpanQueryBuilder.NAME, Strings.toCamelCase(FieldMaskingSpanQueryBuilder.NAME)};
}
@Override
public Query parse(QueryParseContext parseContext) throws IOException, ParsingException {
public FieldMaskingSpanQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException {
XContentParser parser = parseContext.parser();
float boost = 1.0f;
float boost = AbstractQueryBuilder.DEFAULT_BOOST;
SpanQuery inner = null;
SpanQueryBuilder inner = null;
String field = null;
String queryName = null;
@ -63,13 +51,13 @@ public class FieldMaskingSpanQueryParser implements QueryParser {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_OBJECT) {
if ("query".equals(currentFieldName)) {
Query query = parseContext.parseInnerQuery();
if (!(query instanceof SpanQuery)) {
throw new ParsingException(parseContext, "[field_masking_span] query] must be of type span query");
QueryBuilder query = parseContext.parseInnerQueryBuilder();
if (!(query instanceof SpanQueryBuilder)) {
throw new ParsingException(parser.getTokenLocation(), "[field_masking_span] query must be of type span query");
}
inner = (SpanQuery) query;
inner = (SpanQueryBuilder) query;
} else {
throw new ParsingException(parseContext, "[field_masking_span] query does not support ["
throw new ParsingException(parser.getTokenLocation(), "[field_masking_span] query does not support ["
+ currentFieldName + "]");
}
} else {
@ -80,27 +68,25 @@ public class FieldMaskingSpanQueryParser implements QueryParser {
} else if ("_name".equals(currentFieldName)) {
queryName = parser.text();
} else {
throw new ParsingException(parseContext, "[field_masking_span] query does not support [" + currentFieldName + "]");
throw new ParsingException(parser.getTokenLocation(), "[field_masking_span] query does not support [" + currentFieldName + "]");
}
}
}
if (inner == null) {
throw new ParsingException(parseContext, "field_masking_span must have [query] span query clause");
throw new ParsingException(parser.getTokenLocation(), "field_masking_span must have [query] span query clause");
}
if (field == null) {
throw new ParsingException(parseContext, "field_masking_span must have [field] set for it");
throw new ParsingException(parser.getTokenLocation(), "field_masking_span must have [field] set for it");
}
MappedFieldType fieldType = parseContext.fieldMapper(field);
if (fieldType != null) {
field = fieldType.names().indexName();
}
FieldMaskingSpanQuery query = new FieldMaskingSpanQuery(inner, field);
query.setBoost(boost);
if (queryName != null) {
parseContext.addNamedQuery(queryName, query);
}
return query;
FieldMaskingSpanQueryBuilder queryBuilder = new FieldMaskingSpanQueryBuilder(inner, field);
queryBuilder.boost(boost);
queryBuilder.queryName(queryName);
return queryBuilder;
}
}
@Override
public FieldMaskingSpanQueryBuilder getBuilderPrototype() {
return FieldMaskingSpanQueryBuilder.PROTOTYPE;
}
}
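Note the shape of the new contract: fromXContent only builds the intermediate representation, and the Lucene query is created later on the shard. A rough sketch of the two phases (queryShardContext stands in for a real QueryShardContext; this is not code from the change):
// Phase 1, coordinating node: JSON -> builder
FieldMaskingSpanQueryBuilder builder = new FieldMaskingSpanQueryParser().fromXContent(parseContext);
// Phase 2, shard: builder -> Lucene query, resolving field names against the mapping
Query lucene = builder.toQuery(queryShardContext);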

View File

@ -19,177 +19,273 @@
package org.elasticsearch.index.query;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.FuzzyQuery;
import org.apache.lucene.search.MultiTermQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.lucene.BytesRefs;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.query.support.QueryParsers;
import java.io.IOException;
import java.util.Objects;
/**
* A Query that does fuzzy matching for a specific value.
*/
public class FuzzyQueryBuilder extends MultiTermQueryBuilder implements BoostableQueryBuilder<FuzzyQueryBuilder> {
public class FuzzyQueryBuilder extends AbstractQueryBuilder<FuzzyQueryBuilder> implements MultiTermQueryBuilder<FuzzyQueryBuilder> {
private final String name;
public static final String NAME = "fuzzy";
/** Default maximum edit distance. Defaults to AUTO. */
public static final Fuzziness DEFAULT_FUZZINESS = Fuzziness.AUTO;
/** Default number of initial characters which will not be “fuzzified”. Defaults to 0. */
public static final int DEFAULT_PREFIX_LENGTH = FuzzyQuery.defaultPrefixLength;
/** Default maximum number of terms that the fuzzy query will expand to. Defaults to 50. */
public static final int DEFAULT_MAX_EXPANSIONS = FuzzyQuery.defaultMaxExpansions;
/** Default as to whether transpositions should be treated as a primitive edit operation,
* instead of classic Levenshtein algorithm. Defaults to false. */
public static final boolean DEFAULT_TRANSPOSITIONS = false;
private final String fieldName;
private final Object value;
private float boost = -1;
private Fuzziness fuzziness = DEFAULT_FUZZINESS;
private Fuzziness fuzziness;
private int prefixLength = DEFAULT_PREFIX_LENGTH;
private Integer prefixLength;
private int maxExpansions = DEFAULT_MAX_EXPANSIONS;
private Integer maxExpansions;
//LUCENE 4 UPGRADE we need a testcase for this + documentation
private Boolean transpositions;
private boolean transpositions = DEFAULT_TRANSPOSITIONS;
private String rewrite;
private String queryName;
static final FuzzyQueryBuilder PROTOTYPE = new FuzzyQueryBuilder();
/**
* Constructs a new fuzzy query.
*
* @param name The name of the field
* @param fieldName The name of the field
* @param value The value of the text
*/
public FuzzyQueryBuilder(String name, Object value) {
this.name = name;
this.value = value;
public FuzzyQueryBuilder(String fieldName, String value) {
this(fieldName, (Object) value);
}
/**
* Constructs a new fuzzy query.
*
* @param name The name of the field
* @param fieldName The name of the field
* @param value The value of the text
*/
public FuzzyQueryBuilder(String name, String value) {
this(name, (Object) value);
public FuzzyQueryBuilder(String fieldName, int value) {
this(fieldName, (Object) value);
}
/**
* Constructs a new fuzzy query.
*
* @param name The name of the field
* @param fieldName The name of the field
* @param value The value of the text
*/
public FuzzyQueryBuilder(String name, int value) {
this(name, (Object) value);
public FuzzyQueryBuilder(String fieldName, long value) {
this(fieldName, (Object) value);
}
/**
* Constructs a new fuzzy query.
*
* @param name The name of the field
* @param fieldName The name of the field
* @param value The value of the text
*/
public FuzzyQueryBuilder(String name, long value) {
this(name, (Object) value);
public FuzzyQueryBuilder(String fieldName, float value) {
this(fieldName, (Object) value);
}
/**
* Constructs a new fuzzy query.
*
* @param name The name of the field
* @param fieldName The name of the field
* @param value The value of the text
*/
public FuzzyQueryBuilder(String name, float value) {
this(name, (Object) value);
public FuzzyQueryBuilder(String fieldName, double value) {
this(fieldName, (Object) value);
}
/**
* Constructs a new fuzzy query.
*
* @param name The name of the field
* @param fieldName The name of the field
* @param value The value of the text
*/
public FuzzyQueryBuilder(String name, double value) {
this(name, (Object) value);
public FuzzyQueryBuilder(String fieldName, boolean value) {
this(fieldName, (Object) value);
}
// NO COMMIT: not sure we should also allow boolean?
/**
* Constructs a new fuzzy query.
*
* @param name The name of the field
* @param value The value of the text
* @param fieldName The name of the field
* @param value The value of the term
*/
public FuzzyQueryBuilder(String name, boolean value) {
this(name, (Object) value);
public FuzzyQueryBuilder(String fieldName, Object value) {
if (Strings.isEmpty(fieldName)) {
throw new IllegalArgumentException("field name cannot be null or empty.");
}
if (value == null) {
throw new IllegalArgumentException("query value cannot be null");
}
this.fieldName = fieldName;
this.value = convertToBytesRefIfString(value);
}
/**
* Sets the boost for this query. Documents matching this query will (in addition to the normal
* weightings) have their score multiplied by the boost provided.
*/
@Override
public FuzzyQueryBuilder boost(float boost) {
this.boost = boost;
return this;
}
private FuzzyQueryBuilder() {
// for prototype
this.fieldName = null;
this.value = null;
}
public String fieldName() {
return this.fieldName;
}
public Object value() {
return convertToStringIfBytesRef(this.value);
}
public FuzzyQueryBuilder fuzziness(Fuzziness fuzziness) {
this.fuzziness = fuzziness;
this.fuzziness = (fuzziness == null) ? DEFAULT_FUZZINESS : fuzziness;
return this;
}
public Fuzziness fuzziness() {
return this.fuzziness;
}
public FuzzyQueryBuilder prefixLength(int prefixLength) {
this.prefixLength = prefixLength;
return this;
}
public int prefixLength() {
return this.prefixLength;
}
public FuzzyQueryBuilder maxExpansions(int maxExpansions) {
this.maxExpansions = maxExpansions;
return this;
}
public int maxExpansions() {
return this.maxExpansions;
}
public FuzzyQueryBuilder transpositions(boolean transpositions) {
this.transpositions = transpositions;
return this;
}
public boolean transpositions() {
return this.transpositions;
}
public FuzzyQueryBuilder rewrite(String rewrite) {
this.rewrite = rewrite;
return this;
}
/**
* Sets the query name for the filter that can be used when searching for matched_filters per hit.
*/
public FuzzyQueryBuilder queryName(String queryName) {
this.queryName = queryName;
return this;
}
public String rewrite() {
return this.rewrite;
}
@Override
public void doXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(FuzzyQueryParser.NAME);
builder.startObject(name);
builder.field("value", value);
if (boost != -1) {
builder.field("boost", boost);
}
if (transpositions != null) {
builder.field("transpositions", transpositions);
}
if (fuzziness != null) {
fuzziness.toXContent(builder, params);
}
if (prefixLength != null) {
builder.field("prefix_length", prefixLength);
}
if (maxExpansions != null) {
builder.field("max_expansions", maxExpansions);
}
builder.startObject(NAME);
builder.startObject(fieldName);
builder.field("value", convertToStringIfBytesRef(this.value));
fuzziness.toXContent(builder, params);
builder.field("prefix_length", prefixLength);
builder.field("max_expansions", maxExpansions);
builder.field("transpositions", transpositions);
if (rewrite != null) {
builder.field("rewrite", rewrite);
}
if (queryName != null) {
builder.field("_name", queryName);
}
printBoostAndQueryName(builder);
builder.endObject();
builder.endObject();
}
}
@Override
public String getWriteableName() {
return NAME;
}
@Override
public Query doToQuery(QueryShardContext context) throws IOException {
Query query = null;
if (rewrite == null && context.isFilter()) {
rewrite = QueryParsers.CONSTANT_SCORE.getPreferredName();
}
MappedFieldType fieldType = context.fieldMapper(fieldName);
if (fieldType != null) {
query = fieldType.fuzzyQuery(value, fuzziness, prefixLength, maxExpansions, transpositions);
}
if (query == null) {
int maxEdits = fuzziness.asDistance(BytesRefs.toString(value));
query = new FuzzyQuery(new Term(fieldName, BytesRefs.toBytesRef(value)), maxEdits, prefixLength, maxExpansions, transpositions);
}
if (query instanceof MultiTermQuery) {
MultiTermQuery.RewriteMethod rewriteMethod = QueryParsers.parseRewriteMethod(context.parseFieldMatcher(), rewrite, null);
QueryParsers.setRewriteMethod((MultiTermQuery) query, rewriteMethod);
}
return query;
}
@Override
public FuzzyQueryBuilder doReadFrom(StreamInput in) throws IOException {
FuzzyQueryBuilder fuzzyQueryBuilder = new FuzzyQueryBuilder(in.readString(), in.readGenericValue());
fuzzyQueryBuilder.fuzziness = Fuzziness.readFuzzinessFrom(in);
fuzzyQueryBuilder.prefixLength = in.readVInt();
fuzzyQueryBuilder.maxExpansions = in.readVInt();
fuzzyQueryBuilder.transpositions = in.readBoolean();
fuzzyQueryBuilder.rewrite = in.readOptionalString();
return fuzzyQueryBuilder;
}
@Override
public void doWriteTo(StreamOutput out) throws IOException {
out.writeString(this.fieldName);
out.writeGenericValue(this.value);
this.fuzziness.writeTo(out);
out.writeVInt(this.prefixLength);
out.writeVInt(this.maxExpansions);
out.writeBoolean(this.transpositions);
out.writeOptionalString(this.rewrite);
}
@Override
public int doHashCode() {
return Objects.hash(fieldName, value, fuzziness, prefixLength, maxExpansions, transpositions, rewrite);
}
@Override
public boolean doEquals(FuzzyQueryBuilder other) {
return Objects.equals(fieldName, other.fieldName) &&
Objects.equals(value, other.value) &&
Objects.equals(fuzziness, other.fuzziness) &&
Objects.equals(prefixLength, other.prefixLength) &&
Objects.equals(maxExpansions, other.maxExpansions) &&
Objects.equals(transpositions, other.transpositions) &&
Objects.equals(rewrite, other.rewrite);
}
}
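For comparison with the removed setters, a sketch of the new fuzzy builder in use (field and value are made up; Fuzziness.TWO is an existing constant):
FuzzyQueryBuilder fuzzy = new FuzzyQueryBuilder("user", "kimchy");
fuzzy.fuzziness(Fuzziness.TWO) // allow up to two edits
.prefixLength(1)               // require the first character to match exactly
.maxExpansions(25)             // cap the number of expanded terms
.transpositions(true);         // count swaps of adjacent characters as one edit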

View File

@ -19,61 +19,42 @@
package org.elasticsearch.index.query;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.FuzzyQuery;
import org.apache.lucene.search.MultiTermQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.BytesRefs;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.query.support.QueryParsers;
import java.io.IOException;
/**
*
*/
public class FuzzyQueryParser implements QueryParser {
public class FuzzyQueryParser implements QueryParser<FuzzyQueryBuilder> {
public static final String NAME = "fuzzy";
private static final Fuzziness DEFAULT_FUZZINESS = Fuzziness.AUTO;
private static final ParseField FUZZINESS = Fuzziness.FIELD.withDeprecation("min_similarity");
@Inject
public FuzzyQueryParser() {
}
@Override
public String[] names() {
return new String[]{NAME};
return new String[]{ FuzzyQueryBuilder.NAME };
}
@Override
public Query parse(QueryParseContext parseContext) throws IOException, ParsingException {
public FuzzyQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException {
XContentParser parser = parseContext.parser();
XContentParser.Token token = parser.nextToken();
if (token != XContentParser.Token.FIELD_NAME) {
throw new ParsingException(parseContext, "[fuzzy] query malformed, no field");
throw new ParsingException(parser.getTokenLocation(), "[fuzzy] query malformed, no field");
}
String fieldName = parser.currentName();
String fieldName = parser.currentName();
Object value = null;
float boost = 1.0f;
Fuzziness fuzziness = DEFAULT_FUZZINESS;
int prefixLength = FuzzyQuery.defaultPrefixLength;
int maxExpansions = FuzzyQuery.defaultMaxExpansions;
boolean transpositions = FuzzyQuery.defaultTranspositions;
Fuzziness fuzziness = FuzzyQueryBuilder.DEFAULT_FUZZINESS;
int prefixLength = FuzzyQueryBuilder.DEFAULT_PREFIX_LENGTH;
int maxExpansions = FuzzyQueryBuilder.DEFAULT_MAX_EXPANSIONS;
boolean transpositions = FuzzyQueryBuilder.DEFAULT_TRANSPOSITIONS;
String rewrite = null;
String queryName = null;
MultiTermQuery.RewriteMethod rewriteMethod = null;
if (parseContext.isFilter()) {
rewriteMethod = MultiTermQuery.CONSTANT_SCORE_REWRITE;
}
float boost = AbstractQueryBuilder.DEFAULT_BOOST;
token = parser.nextToken();
if (token == XContentParser.Token.START_OBJECT) {
String currentFieldName = null;
@ -94,13 +75,13 @@ public class FuzzyQueryParser implements QueryParser {
} else if ("max_expansions".equals(currentFieldName) || "maxExpansions".equals(currentFieldName)) {
maxExpansions = parser.intValue();
} else if ("transpositions".equals(currentFieldName)) {
transpositions = parser.booleanValue();
transpositions = parser.booleanValue();
} else if ("rewrite".equals(currentFieldName)) {
rewriteMethod = QueryParsers.parseRewriteMethod(parseContext.parseFieldMatcher(), parser.textOrNull(), null);
rewrite = parser.textOrNull();
} else if ("_name".equals(currentFieldName)) {
queryName = parser.text();
} else {
throw new ParsingException(parseContext, "[fuzzy] query does not support [" + currentFieldName + "]");
throw new ParsingException(parser.getTokenLocation(), "[fuzzy] query does not support [" + currentFieldName + "]");
}
}
}
@ -112,26 +93,20 @@ public class FuzzyQueryParser implements QueryParser {
}
if (value == null) {
throw new ParsingException(parseContext, "No value specified for fuzzy query");
throw new ParsingException(parser.getTokenLocation(), "no value specified for fuzzy query");
}
Query query = null;
MappedFieldType fieldType = parseContext.fieldMapper(fieldName);
if (fieldType != null) {
query = fieldType.fuzzyQuery(value, fuzziness, prefixLength, maxExpansions, transpositions);
}
if (query == null) {
int maxEdits = fuzziness.asDistance(BytesRefs.toString(value));
query = new FuzzyQuery(new Term(fieldName, BytesRefs.toBytesRef(value)), maxEdits, prefixLength, maxExpansions, transpositions);
}
if (query instanceof MultiTermQuery) {
QueryParsers.setRewriteMethod((MultiTermQuery) query, rewriteMethod);
}
query.setBoost(boost);
return new FuzzyQueryBuilder(fieldName, value)
.fuzziness(fuzziness)
.prefixLength(prefixLength)
.maxExpansions(maxExpansions)
.transpositions(transpositions)
.rewrite(rewrite)
.boost(boost)
.queryName(queryName);
}
if (queryName != null) {
parseContext.addNamedQuery(queryName, query);
}
return query;
@Override
public FuzzyQueryBuilder getBuilderPrototype() {
return FuzzyQueryBuilder.PROTOTYPE;
}
}
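The prototype accessor above is what makes the builder wire-serializable end to end. A rough round-trip sketch under that assumption, from within the package (the stream classes follow the pattern used elsewhere in this codebase and are not part of this diff):
FuzzyQueryBuilder original = new FuzzyQueryBuilder("user", "kimchy").fuzziness(Fuzziness.ONE);
BytesStreamOutput out = new BytesStreamOutput();
original.writeTo(out);                        // writes field, value, fuzziness, etc.
StreamInput in = StreamInput.wrap(out.bytes());
FuzzyQueryBuilder copy = FuzzyQueryBuilder.PROTOTYPE.readFrom(in);
assert copy.equals(original);                 // doEquals/doHashCode make this hold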

View File

@ -19,174 +19,319 @@
package org.elasticsearch.index.query;
import org.apache.lucene.search.Query;
import org.elasticsearch.Version;
import org.elasticsearch.common.Numbers;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.GeoUtils;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
import org.elasticsearch.index.search.geo.InMemoryGeoBoundingBoxQuery;
import org.elasticsearch.index.search.geo.IndexedGeoBoundingBoxQuery;
import java.io.IOException;
import java.util.Objects;
public class GeoBoundingBoxQueryBuilder extends QueryBuilder {
/**
* Creates a Lucene query that will filter for all documents that lie within the specified
* bounding box.
*
* This query can only operate on fields of type geo_point that have latitude and longitude
* enabled.
* */
public class GeoBoundingBoxQueryBuilder extends AbstractQueryBuilder<GeoBoundingBoxQueryBuilder> {
/** Name of the query. */
public static final String NAME = "geo_bbox";
/** Default type for executing this query (memory as of this writing). */
public static final GeoExecType DEFAULT_TYPE = GeoExecType.MEMORY;
/** Needed for serialization. */
static final GeoBoundingBoxQueryBuilder PROTOTYPE = new GeoBoundingBoxQueryBuilder("");
public static final String TOP_LEFT = GeoBoundingBoxQueryParser.TOP_LEFT;
public static final String BOTTOM_RIGHT = GeoBoundingBoxQueryParser.BOTTOM_RIGHT;
/** Name of field holding geo coordinates to compute the bounding box on.*/
private final String fieldName;
/** Top left corner coordinates of bounding box. */
private GeoPoint topLeft = new GeoPoint(Double.NaN, Double.NaN);
/** Bottom right corner coordinates of bounding box.*/
private GeoPoint bottomRight = new GeoPoint(Double.NaN, Double.NaN);
/** How to deal with incorrect coordinates.*/
private GeoValidationMethod validationMethod = GeoValidationMethod.DEFAULT;
/** How the query should be run. */
private GeoExecType type = DEFAULT_TYPE;
private static final int TOP = 0;
private static final int LEFT = 1;
private static final int BOTTOM = 2;
private static final int RIGHT = 3;
private final String name;
private double[] box = {Double.NaN, Double.NaN, Double.NaN, Double.NaN};
private String queryName;
private String type;
private Boolean coerce;
private Boolean ignoreMalformed;
public GeoBoundingBoxQueryBuilder(String name) {
this.name = name;
/**
* Create new bounding box query.
* @param fieldName name of index field containing geo coordinates to operate on.
* */
public GeoBoundingBoxQueryBuilder(String fieldName) {
if (fieldName == null) {
throw new IllegalArgumentException("Field name must not be empty.");
}
this.fieldName = fieldName;
}
/**
* Adds top left point.
*
* @param lat The latitude
* @param lon The longitude
* @param top The top latitude
* @param left The left longitude
* @param bottom The bottom latitude
* @param right The right longitude
*/
public GeoBoundingBoxQueryBuilder topLeft(double lat, double lon) {
box[TOP] = lat;
box[LEFT] = lon;
public GeoBoundingBoxQueryBuilder setCorners(double top, double left, double bottom, double right) {
if (GeoValidationMethod.isIgnoreMalformed(validationMethod) == false) {
if (Numbers.isValidDouble(top) == false) {
throw new IllegalArgumentException("top latitude is invalid: " + top);
}
if (Numbers.isValidDouble(left) == false) {
throw new IllegalArgumentException("left longitude is invalid: " + left);
}
if (Numbers.isValidDouble(bottom) == false) {
throw new IllegalArgumentException("bottom latitude is invalid: " + bottom);
}
if (Numbers.isValidDouble(right) == false) {
throw new IllegalArgumentException("right longitude is invalid: " + right);
}
// all corners are valid after above checks - make sure they are in the right relation
if (top < bottom) {
throw new IllegalArgumentException("top is below bottom corner: " +
top + " vs. " + bottom);
}
// we do not check longitudes as the query generation code can deal with flipped left/right values
}
topLeft.reset(top, left);
bottomRight.reset(bottom, right);
return this;
}
public GeoBoundingBoxQueryBuilder topLeft(GeoPoint point) {
return topLeft(point.lat(), point.lon());
}
public GeoBoundingBoxQueryBuilder topLeft(String geohash) {
return topLeft(GeoPoint.fromGeohash(geohash));
}
/**
* Adds bottom right corner.
*
* @param lat The latitude
* @param lon The longitude
*/
public GeoBoundingBoxQueryBuilder bottomRight(double lat, double lon) {
box[BOTTOM] = lat;
box[RIGHT] = lon;
return this;
}
public GeoBoundingBoxQueryBuilder bottomRight(GeoPoint point) {
return bottomRight(point.lat(), point.lon());
}
public GeoBoundingBoxQueryBuilder bottomRight(String geohash) {
return bottomRight(GeoPoint.fromGeohash(geohash));
}
/**
* Adds points.
* @param topLeft topLeft point to add.
* @param bottomRight bottomRight point to add.
* */
public GeoBoundingBoxQueryBuilder setCorners(GeoPoint topLeft, GeoPoint bottomRight) {
return setCorners(topLeft.getLat(), topLeft.getLon(), bottomRight.getLat(), bottomRight.getLon());
}
/**
* Adds bottom left corner.
* Adds points.
* @param topLeft topLeft point to add as geohash.
* @param bottomRight bottomRight point to add as geohash.
* */
public GeoBoundingBoxQueryBuilder setCorners(String topLeft, String bottomRight) {
return setCorners(GeoPoint.fromGeohash(topLeft), GeoPoint.fromGeohash(bottomRight));
}
/** Returns the top left corner of the bounding box. */
public GeoPoint topLeft() {
return topLeft;
}
/** Returns the bottom right corner of the bounding box. */
public GeoPoint bottomRight() {
return bottomRight;
}
/**
* Adds corners in OGC standard bbox/envelope format.
*
* @param lat The latitude
* @param lon The longitude
* @param bottomLeft bottom left corner of bounding box.
* @param topRight top right corner of bounding box.
*/
public GeoBoundingBoxQueryBuilder bottomLeft(double lat, double lon) {
box[BOTTOM] = lat;
box[LEFT] = lon;
public GeoBoundingBoxQueryBuilder setCornersOGC(GeoPoint bottomLeft, GeoPoint topRight) {
return setCorners(topRight.getLat(), bottomLeft.getLon(), bottomLeft.getLat(), topRight.getLon());
}
/**
* Adds corners in OGC standard bbox/envelope format.
*
* @param bottomLeft bottom left corner geohash.
* @param topRight top right corner geohash.
*/
public GeoBoundingBoxQueryBuilder setCornersOGC(String bottomLeft, String topRight) {
return setCornersOGC(GeoPoint.fromGeohash(bottomLeft), GeoPoint.fromGeohash(topRight));
}
/**
* Specify whether or not to ignore validation errors of bounding boxes.
* Can only be set if coerce set to false, otherwise calling this
* method has no effect.
**/
public GeoBoundingBoxQueryBuilder setValidationMethod(GeoValidationMethod method) {
this.validationMethod = method;
return this;
}
public GeoBoundingBoxQueryBuilder bottomLeft(GeoPoint point) {
return bottomLeft(point.lat(), point.lon());
}
public GeoBoundingBoxQueryBuilder bottomLeft(String geohash) {
return bottomLeft(GeoPoint.fromGeohash(geohash));
}
/**
* Adds top right point.
*
* @param lat The latitude
* @param lon The longitude
*/
public GeoBoundingBoxQueryBuilder topRight(double lat, double lon) {
box[TOP] = lat;
box[RIGHT] = lon;
return this;
}
public GeoBoundingBoxQueryBuilder topRight(GeoPoint point) {
return topRight(point.lat(), point.lon());
}
public GeoBoundingBoxQueryBuilder topRight(String geohash) {
return topRight(GeoPoint.fromGeohash(geohash));
}
/**
* Sets the filter name for the filter that can be used when searching for matched_filters per hit.
*/
public GeoBoundingBoxQueryBuilder queryName(String queryName) {
this.queryName = queryName;
return this;
}
public GeoBoundingBoxQueryBuilder coerce(boolean coerce) {
this.coerce = coerce;
return this;
}
public GeoBoundingBoxQueryBuilder ignoreMalformed(boolean ignoreMalformed) {
this.ignoreMalformed = ignoreMalformed;
return this;
}
/**
* Returns geo coordinate validation method to use.
* */
public GeoValidationMethod getValidationMethod() {
return this.validationMethod;
}
/**
* Sets the execution type of the geo bounding box query. Can be either `memory` or `indexed`.
* Defaults to `memory`.
*/
public GeoBoundingBoxQueryBuilder type(String type) {
public GeoBoundingBoxQueryBuilder type(GeoExecType type) {
if (type == null) {
throw new IllegalArgumentException("Type is not allowed to be null.");
}
this.type = type;
return this;
}
/**
* For BWC: Parse type from type name.
* */
public GeoBoundingBoxQueryBuilder type(String type) {
this.type = GeoExecType.fromString(type);
return this;
}
/** Returns the execution type of the geo bounding box.*/
public GeoExecType type() {
return type;
}
/** Returns the name of the field to base the bounding box computation on. */
public String fieldName() {
return this.fieldName;
}
QueryValidationException checkLatLon(boolean indexCreatedBeforeV2_0) {
// validation was not available prior to 2.x, so to support bwc percolation queries we skip validation on indexes created before 2.0
if (GeoValidationMethod.isIgnoreMalformed(validationMethod) == true || indexCreatedBeforeV2_0) {
return null;
}
QueryValidationException validationException = null;
// For everything post 2.0 validate latitude and longitude unless validation was explicitly turned off
if (GeoUtils.isValidLatitude(topLeft.getLat()) == false) {
validationException = addValidationError("top latitude is invalid: " + topLeft.getLat(),
validationException);
}
if (GeoUtils.isValidLongitude(topLeft.getLon()) == false) {
validationException = addValidationError("left longitude is invalid: " + topLeft.getLon(),
validationException);
}
if (GeoUtils.isValidLatitude(bottomRight.getLat()) == false) {
validationException = addValidationError("bottom latitude is invalid: " + bottomRight.getLat(),
validationException);
}
if (GeoUtils.isValidLongitude(bottomRight.getLon()) == false) {
validationException = addValidationError("right longitude is invalid: " + bottomRight.getLon(),
validationException);
}
return validationException;
}
@Override
public Query doToQuery(QueryShardContext context) {
QueryValidationException exception = checkLatLon(context.indexVersionCreated().before(Version.V_2_0_0));
if (exception != null) {
throw new QueryShardException(context, "couldn't validate latitude/ longitude values", exception);
}
GeoPoint luceneTopLeft = new GeoPoint(topLeft);
GeoPoint luceneBottomRight = new GeoPoint(bottomRight);
if (GeoValidationMethod.isCoerce(validationMethod)) {
// Special case: if the difference between the left and right is 360 and the right is greater than the left, we are asking for
// the complete longitude range, so we need to set the longitude to the complete longitude range
double right = luceneBottomRight.getLon();
double left = luceneTopLeft.getLon();
boolean completeLonRange = ((right - left) % 360 == 0 && right > left);
GeoUtils.normalizePoint(luceneTopLeft, true, !completeLonRange);
GeoUtils.normalizePoint(luceneBottomRight, true, !completeLonRange);
if (completeLonRange) {
luceneTopLeft.resetLon(-180);
luceneBottomRight.resetLon(180);
}
}
MappedFieldType fieldType = context.fieldMapper(fieldName);
if (fieldType == null) {
throw new QueryShardException(context, "failed to find geo_point field [" + fieldName + "]");
}
if (!(fieldType instanceof GeoPointFieldMapper.GeoPointFieldType)) {
throw new QueryShardException(context, "field [" + fieldName + "] is not a geo_point field");
}
GeoPointFieldMapper.GeoPointFieldType geoFieldType = ((GeoPointFieldMapper.GeoPointFieldType) fieldType);
Query result;
switch(type) {
case INDEXED:
result = IndexedGeoBoundingBoxQuery.create(luceneTopLeft, luceneBottomRight, geoFieldType);
break;
case MEMORY:
IndexGeoPointFieldData indexFieldData = context.getForField(fieldType);
result = new InMemoryGeoBoundingBoxQuery(luceneTopLeft, luceneBottomRight, indexFieldData);
break;
default:
// Someone extended the type enum w/o adjusting this switch statement.
throw new IllegalStateException("geo bounding box type [" + type + "] not supported.");
}
return result;
}
@Override
protected void doXContent(XContentBuilder builder, Params params) throws IOException {
// check values
if(Double.isNaN(box[TOP])) {
throw new IllegalArgumentException("geo_bounding_box requires top latitude to be set");
} else if(Double.isNaN(box[BOTTOM])) {
throw new IllegalArgumentException("geo_bounding_box requires bottom latitude to be set");
} else if(Double.isNaN(box[RIGHT])) {
throw new IllegalArgumentException("geo_bounding_box requires right longitude to be set");
} else if(Double.isNaN(box[LEFT])) {
throw new IllegalArgumentException("geo_bounding_box requires left longitude to be set");
}
builder.startObject(GeoBoundingBoxQueryParser.NAME);
builder.startObject(NAME);
builder.startObject(name);
builder.array(TOP_LEFT, box[LEFT], box[TOP]);
builder.array(BOTTOM_RIGHT, box[RIGHT], box[BOTTOM]);
builder.startObject(fieldName);
builder.array(GeoBoundingBoxQueryParser.TOP_LEFT, topLeft.getLon(), topLeft.getLat());
builder.array(GeoBoundingBoxQueryParser.BOTTOM_RIGHT, bottomRight.getLon(), bottomRight.getLat());
builder.endObject();
builder.field("validation_method", validationMethod);
builder.field("type", type);
if (queryName != null) {
builder.field("_name", queryName);
}
if (type != null) {
builder.field("type", type);
}
if (coerce != null) {
builder.field("coerce", coerce);
}
if (ignoreMalformed != null) {
builder.field("ignore_malformed", ignoreMalformed);
}
printBoostAndQueryName(builder);
builder.endObject();
}
@Override
public boolean doEquals(GeoBoundingBoxQueryBuilder other) {
return Objects.equals(topLeft, other.topLeft) &&
Objects.equals(bottomRight, other.bottomRight) &&
Objects.equals(type, other.type) &&
Objects.equals(validationMethod, other.validationMethod) &&
Objects.equals(fieldName, other.fieldName);
}
@Override
public int doHashCode() {
return Objects.hash(topLeft, bottomRight, type, validationMethod, fieldName);
}
@Override
public GeoBoundingBoxQueryBuilder doReadFrom(StreamInput in) throws IOException {
String fieldName = in.readString();
GeoBoundingBoxQueryBuilder geo = new GeoBoundingBoxQueryBuilder(fieldName);
geo.topLeft = geo.topLeft.readFrom(in);
geo.bottomRight = geo.bottomRight.readFrom(in);
geo.type = GeoExecType.readTypeFrom(in);
geo.validationMethod = GeoValidationMethod.readGeoValidationMethodFrom(in);
return geo;
}
@Override
public void doWriteTo(StreamOutput out) throws IOException {
out.writeString(fieldName);
topLeft.writeTo(out);
bottomRight.writeTo(out);
type.writeTo(out);
validationMethod.writeTo(out);
}
@Override
public String getWriteableName() {
return NAME;
}
}
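A sketch of the corner-based API that replaces the removed topLeft/bottomRight setters (field name and coordinates are made up):
GeoBoundingBoxQueryBuilder bbox = new GeoBoundingBoxQueryBuilder("pin.location");
bbox.setCorners(40.8, -74.1, 40.7, -74.0)         // top, left, bottom, right
.type(GeoExecType.INDEXED)                        // run against indexed lat/lon instead of in-memory field data
.setValidationMethod(GeoValidationMethod.COERCE); // normalize out-of-range coordinates
setCornersOGC accepts the same box as (bottom-left, top-right), matching OGC envelope order.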

View File

@ -19,57 +19,54 @@
package org.elasticsearch.index.query;
import org.apache.lucene.search.Query;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.Version;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.GeoUtils;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
import org.elasticsearch.index.search.geo.InMemoryGeoBoundingBoxQuery;
import org.elasticsearch.index.search.geo.IndexedGeoBoundingBoxQuery;
import java.io.IOException;
/**
*
*/
public class GeoBoundingBoxQueryParser implements QueryParser {
public class GeoBoundingBoxQueryParser implements QueryParser<GeoBoundingBoxQueryBuilder> {
public static final String NAME = "geo_bbox";
/** Key to refer to the top of the bounding box. */
public static final String TOP = "top";
/** Key to refer to the left of the bounding box. */
public static final String LEFT = "left";
/** Key to refer to the right of the bounding box. */
public static final String RIGHT = "right";
/** Key to refer to the bottom of the bounding box. */
public static final String BOTTOM = "bottom";
/** Key to refer to top_left corner of bounding box. */
public static final String TOP_LEFT = TOP + "_" + LEFT;
public static final String TOP_RIGHT = TOP + "_" + RIGHT;
public static final String BOTTOM_LEFT = BOTTOM + "_" + LEFT;
/** Key to refer to bottom_right corner of bounding box. */
public static final String BOTTOM_RIGHT = BOTTOM + "_" + RIGHT;
/** Key to refer to top_right corner of bounding box. */
public static final String TOP_RIGHT = TOP + "_" + RIGHT;
/** Key to refer to bottom left corner of bounding box. */
public static final String BOTTOM_LEFT = BOTTOM + "_" + LEFT;
/** Key to refer to top_left corner of bounding box. */
public static final String TOPLEFT = "topLeft";
public static final String TOPRIGHT = "topRight";
public static final String BOTTOMLEFT = "bottomLeft";
/** Key to refer to bottom_right corner of bounding box. */
public static final String BOTTOMRIGHT = "bottomRight";
/** Key to refer to top_right corner of bounding box. */
public static final String TOPRIGHT = "topRight";
/** Key to refer to bottom left corner of bounding box. */
public static final String BOTTOMLEFT = "bottomLeft";
public static final String FIELD = "field";
@Inject
public GeoBoundingBoxQueryParser() {
}
@Override
public String[] names() {
return new String[]{NAME, "geoBbox", "geo_bounding_box", "geoBoundingBox"};
return new String[]{GeoBoundingBoxQueryBuilder.NAME, "geoBbox", "geo_bounding_box", "geoBoundingBox"};
}
@Override
public Query parse(QueryParseContext parseContext) throws IOException, ParsingException {
public GeoBoundingBoxQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException {
XContentParser parser = parseContext.parser();
String fieldName = null;
@ -78,16 +75,17 @@ public class GeoBoundingBoxQueryParser implements QueryParser {
double bottom = Double.NaN;
double left = Double.NaN;
double right = Double.NaN;
float boost = AbstractQueryBuilder.DEFAULT_BOOST;
String queryName = null;
String currentFieldName = null;
XContentParser.Token token;
final boolean indexCreatedBeforeV2_0 = parseContext.indexVersionCreated().before(Version.V_2_0_0);
boolean coerce = false;
boolean ignoreMalformed = false;
boolean coerce = GeoValidationMethod.DEFAULT_LENIENT_PARSING;
boolean ignoreMalformed = GeoValidationMethod.DEFAULT_LENIENT_PARSING;
GeoValidationMethod validationMethod = null;
GeoPoint sparse = new GeoPoint();
String type = "memory";
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
@ -140,74 +138,43 @@ public class GeoBoundingBoxQueryParser implements QueryParser {
} else if (token.isValue()) {
if ("_name".equals(currentFieldName)) {
queryName = parser.text();
} else if ("coerce".equals(currentFieldName) || (indexCreatedBeforeV2_0 && "normalize".equals(currentFieldName))) {
} else if ("boost".equals(currentFieldName)) {
boost = parser.floatValue();
} else if ("coerce".equals(currentFieldName) || ("normalize".equals(currentFieldName))) {
coerce = parser.booleanValue();
if (coerce == true) {
if (coerce) {
ignoreMalformed = true;
}
} else if ("validation_method".equals(currentFieldName)) {
validationMethod = GeoValidationMethod.fromString(parser.text());
} else if ("type".equals(currentFieldName)) {
type = parser.text();
} else if ("ignore_malformed".equals(currentFieldName) && coerce == false) {
} else if ("ignore_malformed".equals(currentFieldName)) {
ignoreMalformed = parser.booleanValue();
} else {
throw new ParsingException(parseContext, "failed to parse [{}] query. unexpected field [{}]", NAME, currentFieldName);
throw new ParsingException(parser.getTokenLocation(), "failed to parse [{}] query. unexpected field [{}]", NAME, currentFieldName);
}
}
}
final GeoPoint topLeft = sparse.reset(top, left); //just keep the object
final GeoPoint bottomRight = new GeoPoint(bottom, right);
// validation was not available prior to 2.x, so to support bwc percolation queries we only ignore_malformed on 2.x created indexes
if (!indexCreatedBeforeV2_0 && !ignoreMalformed) {
if (topLeft.lat() > 90.0 || topLeft.lat() < -90.0) {
throw new ParsingException(parseContext, "illegal latitude value [{}] for [{}]", topLeft.lat(), NAME);
}
if (topLeft.lon() > 180.0 || topLeft.lon() < -180) {
throw new ParsingException(parseContext, "illegal longitude value [{}] for [{}]", topLeft.lon(), NAME);
}
if (bottomRight.lat() > 90.0 || bottomRight.lat() < -90.0) {
throw new ParsingException(parseContext, "illegal latitude value [{}] for [{}]", bottomRight.lat(), NAME);
}
if (bottomRight.lon() > 180.0 || bottomRight.lon() < -180) {
throw new ParsingException(parseContext, "illegal longitude value [{}] for [{}]", bottomRight.lon(), NAME);
}
}
if (coerce) {
// Special case: if the difference between the left and right is 360 and the right is greater than the left, we are asking for
// the complete longitude range so need to set longitude to the complete longditude range
boolean completeLonRange = ((right - left) % 360 == 0 && right > left);
GeoUtils.normalizePoint(topLeft, true, !completeLonRange);
GeoUtils.normalizePoint(bottomRight, true, !completeLonRange);
if (completeLonRange) {
topLeft.resetLon(-180);
bottomRight.resetLon(180);
}
}
MappedFieldType fieldType = parseContext.fieldMapper(fieldName);
if (fieldType == null) {
throw new ParsingException(parseContext, "failed to parse [{}] query. could not find [{}] field [{}]", NAME, GeoPointFieldMapper.CONTENT_TYPE, fieldName);
}
if (!(fieldType instanceof GeoPointFieldMapper.GeoPointFieldType)) {
throw new ParsingException(parseContext, "failed to parse [{}] query. field [{}] is expected to be of type [{}], but is of [{}] type instead", NAME, fieldName, GeoPointFieldMapper.CONTENT_TYPE, fieldType.typeName());
}
GeoPointFieldMapper.GeoPointFieldType geoFieldType = ((GeoPointFieldMapper.GeoPointFieldType) fieldType);
Query filter;
if ("indexed".equals(type)) {
filter = IndexedGeoBoundingBoxQuery.create(topLeft, bottomRight, geoFieldType);
} else if ("memory".equals(type)) {
IndexGeoPointFieldData indexFieldData = parseContext.getForField(fieldType);
filter = new InMemoryGeoBoundingBoxQuery(topLeft, bottomRight, indexFieldData);
GeoBoundingBoxQueryBuilder builder = new GeoBoundingBoxQueryBuilder(fieldName);
builder.setCorners(topLeft, bottomRight);
builder.queryName(queryName);
builder.boost(boost);
builder.type(GeoExecType.fromString(type));
if (validationMethod != null) {
// ignore deprecated coerce/ignoreMalformed settings if validationMethod is set
builder.setValidationMethod(validationMethod);
} else {
throw new ParsingException(parseContext, "failed to parse [{}] query. geo bounding box type [{}] is not supported. either [indexed] or [memory] are allowed", NAME, type);
builder.setValidationMethod(GeoValidationMethod.infer(coerce, ignoreMalformed));
}
return builder;
}
if (queryName != null) {
parseContext.addNamedQuery(queryName, filter);
}
return filter;
}
@Override
public GeoBoundingBoxQueryBuilder getBuilderPrototype() {
return GeoBoundingBoxQueryBuilder.PROTOTYPE;
}
}
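All corner spellings accepted above (top_left/bottom_right, their camelCase twins, and the OGC bottom_left/top_right pair) funnel into the same builder state. For instance, an OGC-style request is equivalent to this hypothetical builder call:
// bottom_left = (40.7, -74.1), top_right = (40.8, -74.0)
new GeoBoundingBoxQueryBuilder("pin.location")
.setCornersOGC(new GeoPoint(40.7, -74.1), new GeoPoint(40.8, -74.0));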

View File

@ -19,122 +19,283 @@
package org.elasticsearch.index.query;
import org.apache.lucene.search.Query;
import org.elasticsearch.Version;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.geo.GeoDistance;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.GeoUtils;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
import org.elasticsearch.index.search.geo.GeoDistanceRangeQuery;
import java.io.IOException;
import java.util.Locale;
import java.util.Objects;
public class GeoDistanceQueryBuilder extends QueryBuilder {
/**
* Filter results of a query to include only those within a specific distance to some
* geo point.
* */
public class GeoDistanceQueryBuilder extends AbstractQueryBuilder<GeoDistanceQueryBuilder> {
private final String name;
/** Name of the query in the query dsl. */
public static final String NAME = "geo_distance";
/** Default for latitude normalization (as of this writing true).*/
public static final boolean DEFAULT_NORMALIZE_LAT = true;
/** Default for longitude normalization (as of this writing true). */
public static final boolean DEFAULT_NORMALIZE_LON = true;
/** Default for distance unit computation. */
public static final DistanceUnit DEFAULT_DISTANCE_UNIT = DistanceUnit.DEFAULT;
/** Default for geo distance computation. */
public static final GeoDistance DEFAULT_GEO_DISTANCE = GeoDistance.DEFAULT;
/** Default for optimising query through pre computed bounding box query. */
public static final String DEFAULT_OPTIMIZE_BBOX = "memory";
private String distance;
private final String fieldName;
/** Distance from center to cover. */
private double distance;
/** Point to use as center. */
private GeoPoint center = new GeoPoint(Double.NaN, Double.NaN);
/** Algorithm to use for distance computation. */
private GeoDistance geoDistance = DEFAULT_GEO_DISTANCE;
/** Whether or not to use a bbox for pre-filtering. TODO change to enum? */
private String optimizeBbox = DEFAULT_OPTIMIZE_BBOX;
/** How strict should geo coordinate validation be? */
private GeoValidationMethod validationMethod = GeoValidationMethod.DEFAULT;
private double lat;
static final GeoDistanceQueryBuilder PROTOTYPE = new GeoDistanceQueryBuilder("_na_");
private double lon;
private String geohash;
private GeoDistance geoDistance;
private String optimizeBbox;
private String queryName;
private Boolean coerce;
private Boolean ignoreMalformed;
public GeoDistanceQueryBuilder(String name) {
this.name = name;
/**
* Construct new GeoDistanceQueryBuilder.
* @param fieldName name of indexed geo field to operate distance computation on.
* */
public GeoDistanceQueryBuilder(String fieldName) {
if (Strings.isEmpty(fieldName)) {
throw new IllegalArgumentException("fieldName must not be null or empty");
}
this.fieldName = fieldName;
}
/** Name of the field this query is operating on. */
public String fieldName() {
return this.fieldName;
}
/** Sets the center point for the query.
* @param point the center of the query
**/
public GeoDistanceQueryBuilder point(GeoPoint point) {
if (point == null) {
throw new IllegalArgumentException("center point must not be null");
}
this.center = point;
return this;
}
/**
* Sets the center point of the query.
* @param lat latitude of center
* @param lon longitude of center
* */
public GeoDistanceQueryBuilder point(double lat, double lon) {
this.lat = lat;
this.lon = lon;
this.center = new GeoPoint(lat, lon);
return this;
}
public GeoDistanceQueryBuilder lat(double lat) {
this.lat = lat;
return this;
}
public GeoDistanceQueryBuilder lon(double lon) {
this.lon = lon;
return this;
}
/** Returns the center point of the distance query. */
public GeoPoint point() {
return this.center;
}
/** Sets the distance from the center using the default distance unit.*/
public GeoDistanceQueryBuilder distance(String distance) {
this.distance = distance;
return distance(distance, DistanceUnit.DEFAULT);
}
/** Sets the distance from the center for this query. */
public GeoDistanceQueryBuilder distance(String distance, DistanceUnit unit) {
if (Strings.isEmpty(distance)) {
throw new IllegalArgumentException("distance must not be null or empty");
}
if (unit == null) {
throw new IllegalArgumentException("distance unit must not be null");
}
this.distance = DistanceUnit.parse(distance, unit, DistanceUnit.DEFAULT);
return this;
}
/** Sets the distance from the center for this query. */
public GeoDistanceQueryBuilder distance(double distance, DistanceUnit unit) {
this.distance = unit.toString(distance);
return this;
return distance(Double.toString(distance), unit);
}
/** Returns the distance configured as radius. */
public double distance() {
return distance;
}
/** Sets the center point for this query. */
public GeoDistanceQueryBuilder geohash(String geohash) {
this.geohash = geohash;
if (Strings.isEmpty(geohash)) {
throw new IllegalArgumentException("geohash must not be null or empty");
}
this.center.resetFromGeoHash(geohash);
return this;
}
/** Which type of geo distance calculation method to use. */
public GeoDistanceQueryBuilder geoDistance(GeoDistance geoDistance) {
if (geoDistance == null) {
throw new IllegalArgumentException("geoDistance must not be null");
}
this.geoDistance = geoDistance;
return this;
}
/** Returns geo distance calculation type to use. */
public GeoDistance geoDistance() {
return this.geoDistance;
}
/**
* Set this to memory or indexed if, before running the distance
* calculation, you want to limit the candidates to hits within the
* enclosing bounding box.
**/
public GeoDistanceQueryBuilder optimizeBbox(String optimizeBbox) {
if (optimizeBbox == null) {
throw new IllegalArgumentException("optimizeBox must not be null");
}
switch (optimizeBbox) {
case "none":
case "memory":
case "indexed":
break;
default:
throw new IllegalArgumentException("optimizeBbox must be one of [none, memory, indexed]");
}
this.optimizeBbox = optimizeBbox;
return this;
}
/**
* Sets the filter name for the filter that can be used when searching for matched_filters per hit.
*/
public GeoDistanceQueryBuilder queryName(String queryName) {
this.queryName = queryName;
return this;
}
/**
* Returns whether or not to run a BoundingBox query prior to
* the distance query, for optimization purposes. */
public String optimizeBbox() {
return this.optimizeBbox;
}
public GeoDistanceQueryBuilder coerce(boolean coerce) {
this.coerce = coerce;
return this;
}
/** Set validation method for geo coordinates. */
public void setValidationMethod(GeoValidationMethod method) {
this.validationMethod = method;
}
public GeoDistanceQueryBuilder ignoreMalformed(boolean ignoreMalformed) {
this.ignoreMalformed = ignoreMalformed;
return this;
}
/** Returns validation method for geo coordinates. */
public GeoValidationMethod getValidationMethod() {
return this.validationMethod;
}
@Override
protected Query doToQuery(QueryShardContext shardContext) throws IOException {
QueryValidationException exception = checkLatLon(shardContext.indexVersionCreated().before(Version.V_2_0_0));
if (exception != null) {
throw new QueryShardException(shardContext, "couldn't validate latitude/ longitude values", exception);
}
if (GeoValidationMethod.isCoerce(validationMethod)) {
GeoUtils.normalizePoint(center, true, true);
}
double normDistance = geoDistance.normalize(this.distance, DistanceUnit.DEFAULT);
MappedFieldType fieldType = shardContext.fieldMapper(fieldName);
if (fieldType == null) {
throw new QueryShardException(shardContext, "failed to find geo_point field [" + fieldName + "]");
}
if (!(fieldType instanceof GeoPointFieldMapper.GeoPointFieldType)) {
throw new QueryShardException(shardContext, "field [" + fieldName + "] is not a geo_point field");
}
GeoPointFieldMapper.GeoPointFieldType geoFieldType = ((GeoPointFieldMapper.GeoPointFieldType) fieldType);
IndexGeoPointFieldData indexFieldData = shardContext.getForField(fieldType);
Query query = new GeoDistanceRangeQuery(center, null, normDistance, true, false, geoDistance, geoFieldType, indexFieldData, optimizeBbox);
return query;
}
@Override
protected void doXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(GeoDistanceQueryParser.NAME);
if (geohash != null) {
builder.field(name, geohash);
} else {
builder.startArray(name).value(lon).value(lat).endArray();
}
builder.startObject(NAME);
builder.startArray(fieldName).value(center.lon()).value(center.lat()).endArray();
builder.field("distance", distance);
if (geoDistance != null) {
builder.field("distance_type", geoDistance.name().toLowerCase(Locale.ROOT));
}
if (optimizeBbox != null) {
builder.field("optimize_bbox", optimizeBbox);
}
if (queryName != null) {
builder.field("_name", queryName);
}
if (coerce != null) {
builder.field("coerce", coerce);
}
if (ignoreMalformed != null) {
builder.field("ignore_malformed", ignoreMalformed);
}
builder.field("distance_type", geoDistance.name().toLowerCase(Locale.ROOT));
builder.field("optimize_bbox", optimizeBbox);
builder.field("validation_method", validationMethod);
printBoostAndQueryName(builder);
builder.endObject();
}
@Override
public int doHashCode() {
return Objects.hash(center, geoDistance, optimizeBbox, distance, validationMethod);
}
@Override
public boolean doEquals(GeoDistanceQueryBuilder other) {
return Objects.equals(fieldName, other.fieldName) &&
(distance == other.distance) &&
Objects.equals(validationMethod, other.validationMethod) &&
Objects.equals(center, other.center) &&
Objects.equals(optimizeBbox, other.optimizeBbox) &&
Objects.equals(geoDistance, other.geoDistance);
}
@Override
protected GeoDistanceQueryBuilder doReadFrom(StreamInput in) throws IOException {
String fieldName = in.readString();
GeoDistanceQueryBuilder result = new GeoDistanceQueryBuilder(fieldName);
result.distance = in.readDouble();
result.validationMethod = GeoValidationMethod.readGeoValidationMethodFrom(in);
result.center = GeoPoint.readGeoPointFrom(in);
result.optimizeBbox = in.readString();
result.geoDistance = GeoDistance.readGeoDistanceFrom(in);
return result;
}
@Override
protected void doWriteTo(StreamOutput out) throws IOException {
out.writeString(fieldName);
out.writeDouble(distance);
validationMethod.writeTo(out);
center.writeTo(out);
out.writeString(optimizeBbox);
geoDistance.writeTo(out);
}
private QueryValidationException checkLatLon(boolean indexCreatedBeforeV2_0) {
// validation was not available prior to 2.x, so to support bwc percolation queries we skip validation on indexes created before 2.0
if (GeoValidationMethod.isIgnoreMalformed(validationMethod) || indexCreatedBeforeV2_0) {
return null;
}
QueryValidationException validationException = null;
// For everything post 2.0, validate latitude and longitude unless validation was explicitly turned off
if (GeoUtils.isValidLatitude(center.getLat()) == false) {
validationException = addValidationError("center point latitude is invalid: " + center.getLat(), validationException);
}
if (GeoUtils.isValidLongitude(center.getLon()) == false) {
validationException = addValidationError("center point longitude is invalid: " + center.getLon(), validationException);
}
return validationException;
}
@Override
public String getWriteableName() {
return NAME;
}
}
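A sketch of the reworked distance query, using the validated setters above (field name and coordinates are illustrative):
GeoDistanceQueryBuilder geo = new GeoDistanceQueryBuilder("pin.location");
geo.point(40.7143528, -74.0059731)     // circle center as lat, lon
.distance(12, DistanceUnit.KILOMETERS) // radius; parsed and normalized internally
.optimizeBbox("indexed")               // pre-filter candidates with an indexed bounding box
.geoDistance(GeoDistance.ARC);         // exact arc distance rather than a plane approximation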

View File

@ -19,23 +19,19 @@
package org.elasticsearch.index.query;
import org.apache.lucene.search.Query;
import org.elasticsearch.Version;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.geo.GeoDistance;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.GeoUtils;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
import org.elasticsearch.index.search.geo.GeoDistanceRangeQuery;
import java.io.IOException;
/**
* Parses a GeoDistanceQuery. See also
*
* <pre>
* {
* "name.lat" : 1.1,
@ -43,37 +39,32 @@ import java.io.IOException;
* }
* </pre>
*/
public class GeoDistanceQueryParser implements QueryParser {
public static final String NAME = "geo_distance";
@Inject
public GeoDistanceQueryParser() {
}
public class GeoDistanceQueryParser implements QueryParser<GeoDistanceQueryBuilder> {
@Override
public String[] names() {
return new String[]{NAME, "geoDistance"};
return new String[]{GeoDistanceQueryBuilder.NAME, "geoDistance"};
}
@Override
public Query parse(QueryParseContext parseContext) throws IOException, ParsingException {
public GeoDistanceQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException {
XContentParser parser = parseContext.parser();
XContentParser.Token token;
float boost = AbstractQueryBuilder.DEFAULT_BOOST;
String queryName = null;
String currentFieldName = null;
GeoPoint point = new GeoPoint();
GeoPoint point = new GeoPoint(Double.NaN, Double.NaN);
String fieldName = null;
double distance = 0;
Object vDistance = null;
DistanceUnit unit = DistanceUnit.DEFAULT;
GeoDistance geoDistance = GeoDistance.DEFAULT;
String optimizeBbox = "memory";
final boolean indexCreatedBeforeV2_0 = parseContext.indexVersionCreated().before(Version.V_2_0_0);
boolean coerce = false;
boolean ignoreMalformed = false;
DistanceUnit unit = GeoDistanceQueryBuilder.DEFAULT_DISTANCE_UNIT;
GeoDistance geoDistance = GeoDistanceQueryBuilder.DEFAULT_GEO_DISTANCE;
String optimizeBbox = GeoDistanceQueryBuilder.DEFAULT_OPTIMIZE_BBOX;
boolean coerce = GeoValidationMethod.DEFAULT_LENIENT_PARSING;
boolean ignoreMalformed = GeoValidationMethod.DEFAULT_LENIENT_PARSING;
GeoValidationMethod validationMethod = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
@ -85,6 +76,7 @@ public class GeoDistanceQueryParser implements QueryParser {
} else if (token == XContentParser.Token.START_OBJECT) {
// the json in the format of -> field : { lat : 30, lon : 12 }
String currentName = parser.currentName();
assert currentFieldName != null;
fieldName = currentFieldName;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
@ -97,21 +89,21 @@ public class GeoDistanceQueryParser implements QueryParser {
} else if (currentName.equals(GeoPointFieldMapper.Names.GEOHASH)) {
point.resetFromGeoHash(parser.text());
} else {
throw new ParsingException(parseContext, "[geo_distance] query does not support [" + currentFieldName
throw new ParsingException(parser.getTokenLocation(), "[geo_distance] query does not support [" + currentFieldName
+ "]");
}
}
}
} else if (token.isValue()) {
if (currentFieldName.equals("distance")) {
if ("distance".equals(currentFieldName)) {
if (token == XContentParser.Token.VALUE_STRING) {
vDistance = parser.text(); // a String
} else {
vDistance = parser.numberValue(); // a Number
}
} else if (currentFieldName.equals("unit")) {
} else if ("unit".equals(currentFieldName)) {
unit = DistanceUnit.fromString(parser.text());
} else if (currentFieldName.equals("distance_type") || currentFieldName.equals("distanceType")) {
} else if ("distance_type".equals(currentFieldName) || "distanceType".equals(currentFieldName)) {
geoDistance = GeoDistance.fromString(parser.text());
} else if (currentFieldName.endsWith(GeoPointFieldMapper.Names.LAT_SUFFIX)) {
point.resetLat(parser.doubleValue());
@ -124,15 +116,19 @@ public class GeoDistanceQueryParser implements QueryParser {
fieldName = currentFieldName.substring(0, currentFieldName.length() - GeoPointFieldMapper.Names.GEOHASH_SUFFIX.length());
} else if ("_name".equals(currentFieldName)) {
queryName = parser.text();
} else if ("boost".equals(currentFieldName)) {
boost = parser.floatValue();
} else if ("optimize_bbox".equals(currentFieldName) || "optimizeBbox".equals(currentFieldName)) {
optimizeBbox = parser.textOrNull();
} else if ("coerce".equals(currentFieldName) || (indexCreatedBeforeV2_0 && "normalize".equals(currentFieldName))) {
} else if ("coerce".equals(currentFieldName) || ("normalize".equals(currentFieldName))) {
coerce = parser.booleanValue();
if (coerce == true) {
ignoreMalformed = true;
}
} else if ("ignore_malformed".equals(currentFieldName) && coerce == false) {
} else if ("ignore_malformed".equals(currentFieldName)) {
ignoreMalformed = parser.booleanValue();
} else if ("validation_method".equals(currentFieldName)) {
validationMethod = GeoValidationMethod.fromString(parser.text());
} else {
point.resetFromString(parser.text());
fieldName = currentFieldName;
@ -140,44 +136,31 @@ public class GeoDistanceQueryParser implements QueryParser {
}
}
// validation was not available prior to 2.x, so to support bwc percolation queries we only ignore_malformed on 2.x created indexes
if (!indexCreatedBeforeV2_0 && !ignoreMalformed) {
if (point.lat() > 90.0 || point.lat() < -90.0) {
throw new ParsingException(parseContext, "illegal latitude value [{}] for [{}]", point.lat(), NAME);
}
if (point.lon() > 180.0 || point.lon() < -180) {
throw new ParsingException(parseContext, "illegal longitude value [{}] for [{}]", point.lon(), NAME);
}
}
if (coerce) {
GeoUtils.normalizePoint(point, coerce, coerce);
}
if (vDistance == null) {
throw new ParsingException(parseContext, "geo_distance requires 'distance' to be specified");
} else if (vDistance instanceof Number) {
distance = DistanceUnit.DEFAULT.convert(((Number) vDistance).doubleValue(), unit);
throw new ParsingException(parser.getTokenLocation(), "geo_distance requires 'distance' to be specified");
}
GeoDistanceQueryBuilder qb = new GeoDistanceQueryBuilder(fieldName);
if (vDistance instanceof Number) {
qb.distance(((Number) vDistance).doubleValue(), unit);
} else {
distance = DistanceUnit.parse((String) vDistance, unit, DistanceUnit.DEFAULT);
qb.distance((String) vDistance, unit);
}
distance = geoDistance.normalize(distance, DistanceUnit.DEFAULT);
qb.point(point);
if (validationMethod != null) {
qb.setValidationMethod(validationMethod);
} else {
qb.setValidationMethod(GeoValidationMethod.infer(coerce, ignoreMalformed));
}
qb.optimizeBbox(optimizeBbox);
qb.geoDistance(geoDistance);
qb.boost(boost);
qb.queryName(queryName);
return qb;
}
MappedFieldType fieldType = parseContext.fieldMapper(fieldName);
if (fieldType == null) {
throw new ParsingException(parseContext, "failed to find geo_point field [" + fieldName + "]");
}
if (!(fieldType instanceof GeoPointFieldMapper.GeoPointFieldType)) {
throw new ParsingException(parseContext, "field [" + fieldName + "] is not a geo_point field");
}
GeoPointFieldMapper.GeoPointFieldType geoFieldType = ((GeoPointFieldMapper.GeoPointFieldType) fieldType);
IndexGeoPointFieldData indexFieldData = parseContext.getForField(fieldType);
Query query = new GeoDistanceRangeQuery(point, null, distance, true, false, geoDistance, geoFieldType, indexFieldData, optimizeBbox);
if (queryName != null) {
parseContext.addNamedQuery(queryName, query);
}
return query;
@Override
public GeoDistanceQueryBuilder getBuilderPrototype() {
return GeoDistanceQueryBuilder.PROTOTYPE;
}
}
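For context on the new flow: the parser above no longer builds a Lucene query directly, it only populates a GeoDistanceQueryBuilder. A minimal usage sketch, using only the setters visible in this diff (the "pin.location" field name and the "12km" distance are illustrative, not from this change):

    GeoDistanceQueryBuilder qb = new GeoDistanceQueryBuilder("pin.location"); // hypothetical geo_point field
    qb.point(new GeoPoint(40.73, -74.1));
    qb.distance("12km", DistanceUnit.DEFAULT);   // String overload; a Number overload also exists
    qb.geoDistance(GeoDistance.DEFAULT);
    qb.optimizeBbox("memory");
    qb.setValidationMethod(GeoValidationMethod.DEFAULT);
    qb.queryName("near_nyc");                    // optional, surfaces in matched_queries per hit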

View File

@ -19,161 +19,309 @@
package org.elasticsearch.index.query;
import org.apache.lucene.search.Query;
import org.elasticsearch.Version;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.geo.GeoDistance;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.GeoUtils;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
import org.elasticsearch.index.search.geo.GeoDistanceRangeQuery;
import java.io.IOException;
import java.util.Locale;
import java.util.Objects;
public class GeoDistanceRangeQueryBuilder extends QueryBuilder {
public class GeoDistanceRangeQueryBuilder extends AbstractQueryBuilder<GeoDistanceRangeQueryBuilder> {
private final String name;
public static final String NAME = "geo_distance_range";
public static final boolean DEFAULT_INCLUDE_LOWER = true;
public static final boolean DEFAULT_INCLUDE_UPPER = true;
public static final GeoDistance DEFAULT_GEO_DISTANCE = GeoDistance.DEFAULT;
public static final DistanceUnit DEFAULT_UNIT = DistanceUnit.DEFAULT;
public static final String DEFAULT_OPTIMIZE_BBOX = "memory";
private final String fieldName;
private Object from;
private Object to;
private boolean includeLower = true;
private boolean includeUpper = true;
private boolean includeLower = DEFAULT_INCLUDE_LOWER;
private boolean includeUpper = DEFAULT_INCLUDE_UPPER;
private double lat;
private final GeoPoint point;
private double lon;
private GeoDistance geoDistance = DEFAULT_GEO_DISTANCE;
private String geohash;
private DistanceUnit unit = DEFAULT_UNIT;
private GeoDistance geoDistance;
private String optimizeBbox = DEFAULT_OPTIMIZE_BBOX;
private String queryName;
private GeoValidationMethod validationMethod = GeoValidationMethod.DEFAULT;
private String optimizeBbox;
static final GeoDistanceRangeQueryBuilder PROTOTYPE = new GeoDistanceRangeQueryBuilder("_na_", new GeoPoint());
private Boolean coerce;
private Boolean ignoreMalformed;
public GeoDistanceRangeQueryBuilder(String name) {
this.name = name;
public GeoDistanceRangeQueryBuilder(String fieldName, GeoPoint point) {
if (Strings.isEmpty(fieldName)) {
throw new IllegalArgumentException("fieldName must not be null");
}
if (point == null) {
throw new IllegalArgumentException("point must not be null");
}
this.fieldName = fieldName;
this.point = point;
}
public GeoDistanceRangeQueryBuilder point(double lat, double lon) {
this.lat = lat;
this.lon = lon;
return this;
public GeoDistanceRangeQueryBuilder(String fieldName, double lat, double lon) {
this(fieldName, new GeoPoint(lat, lon));
}
public GeoDistanceRangeQueryBuilder lat(double lat) {
this.lat = lat;
return this;
public GeoDistanceRangeQueryBuilder(String fieldName, String geohash) {
this(fieldName, geohash == null ? null : new GeoPoint().resetFromGeoHash(geohash));
}
public GeoDistanceRangeQueryBuilder lon(double lon) {
this.lon = lon;
return this;
public String fieldName() {
return fieldName;
}
public GeoDistanceRangeQueryBuilder from(Object from) {
public GeoPoint point() {
return point;
}
public GeoDistanceRangeQueryBuilder from(String from) {
if (from == null) {
throw new IllegalArgumentException("[from] must not be null");
}
this.from = from;
return this;
}
public GeoDistanceRangeQueryBuilder to(Object to) {
this.to = to;
return this;
}
public GeoDistanceRangeQueryBuilder gt(Object from) {
public GeoDistanceRangeQueryBuilder from(Number from) {
if (from == null) {
throw new IllegalArgumentException("[from] must not be null");
}
this.from = from;
this.includeLower = false;
return this;
}
public GeoDistanceRangeQueryBuilder gte(Object from) {
this.from = from;
this.includeLower = true;
return this;
public Object from() {
return from;
}
public GeoDistanceRangeQueryBuilder lt(Object to) {
public GeoDistanceRangeQueryBuilder to(String to) {
if (to == null) {
throw new IllegalArgumentException("[to] must not be null");
}
this.to = to;
this.includeUpper = false;
return this;
}
public GeoDistanceRangeQueryBuilder lte(Object to) {
public GeoDistanceRangeQueryBuilder to(Number to) {
if (to == null) {
throw new IllegalArgumentException("[to] must not be null");
}
this.to = to;
this.includeUpper = true;
return this;
}
public Object to() {
return to;
}
public GeoDistanceRangeQueryBuilder includeLower(boolean includeLower) {
this.includeLower = includeLower;
return this;
}
public boolean includeLower() {
return includeLower;
}
public GeoDistanceRangeQueryBuilder includeUpper(boolean includeUpper) {
this.includeUpper = includeUpper;
return this;
}
public GeoDistanceRangeQueryBuilder geohash(String geohash) {
this.geohash = geohash;
return this;
public boolean includeUpper() {
return includeUpper;
}
public GeoDistanceRangeQueryBuilder geoDistance(GeoDistance geoDistance) {
if (geoDistance == null) {
throw new IllegalArgumentException("geoDistance calculation mode must not be null");
}
this.geoDistance = geoDistance;
return this;
}
public GeoDistance geoDistance() {
return geoDistance;
}
public GeoDistanceRangeQueryBuilder unit(DistanceUnit unit) {
if (unit == null) {
throw new IllegalArgumentException("distance unit must not be null");
}
this.unit = unit;
return this;
}
public DistanceUnit unit() {
return unit;
}
public GeoDistanceRangeQueryBuilder optimizeBbox(String optimizeBbox) {
if (optimizeBbox == null) {
throw new IllegalArgumentException("optimizeBox must not be null");
}
switch (optimizeBbox) {
case "none":
case "memory":
case "indexed":
break;
default:
throw new IllegalArgumentException("optimizeBbox must be one of [none, memory, indexed]");
}
this.optimizeBbox = optimizeBbox;
return this;
}
public GeoDistanceRangeQueryBuilder coerce(boolean coerce) {
this.coerce = coerce;
return this;
public String optimizeBbox() {
return optimizeBbox;
}
public GeoDistanceRangeQueryBuilder ignoreMalformed(boolean ignoreMalformed) {
this.ignoreMalformed = ignoreMalformed;
/** Set validation method for coordinates. */
public GeoDistanceRangeQueryBuilder setValidationMethod(GeoValidationMethod method) {
this.validationMethod = method;
return this;
}
/** Returns validation method for coordinates. */
public GeoValidationMethod getValidationMethod() {
return this.validationMethod;
}
/**
* Sets the filter name for the filter that can be used when searching for matched_filters per hit.
*/
public GeoDistanceRangeQueryBuilder queryName(String queryName) {
this.queryName = queryName;
return this;
@Override
protected Query doToQuery(QueryShardContext context) throws IOException {
final boolean indexCreatedBeforeV2_0 = context.indexVersionCreated().before(Version.V_2_0_0);
// validation was not available prior to 2.x, so to support bwc
// percolation queries we only ignore_malformed on 2.x created indexes
if (!indexCreatedBeforeV2_0 && !GeoValidationMethod.isIgnoreMalformed(validationMethod)) {
if (!GeoUtils.isValidLatitude(point.lat())) {
throw new QueryShardException(context, "illegal latitude value [{}] for [{}]", point.lat(), NAME);
}
if (!GeoUtils.isValidLongitude(point.lon())) {
throw new QueryShardException(context, "illegal longitude value [{}] for [{}]", point.lon(), NAME);
}
}
if (GeoValidationMethod.isCoerce(validationMethod)) {
GeoUtils.normalizePoint(point, true, true);
}
Double fromValue = null;
Double toValue = null;
if (from != null) {
if (from instanceof Number) {
fromValue = unit.toMeters(((Number) from).doubleValue());
} else {
fromValue = DistanceUnit.parse((String) from, unit, DistanceUnit.DEFAULT);
}
fromValue = geoDistance.normalize(fromValue, DistanceUnit.DEFAULT);
}
if (to != null) {
if (to instanceof Number) {
toValue = unit.toMeters(((Number) to).doubleValue());
} else {
toValue = DistanceUnit.parse((String) to, unit, DistanceUnit.DEFAULT);
}
toValue = geoDistance.normalize(toValue, DistanceUnit.DEFAULT);
}
MappedFieldType fieldType = context.fieldMapper(fieldName);
if (fieldType == null) {
throw new QueryShardException(context, "failed to find geo_point field [" + fieldName + "]");
}
if (!(fieldType instanceof GeoPointFieldMapper.GeoPointFieldType)) {
throw new QueryShardException(context, "field [" + fieldName + "] is not a geo_point field");
}
GeoPointFieldMapper.GeoPointFieldType geoFieldType = ((GeoPointFieldMapper.GeoPointFieldType) fieldType);
IndexGeoPointFieldData indexFieldData = context.getForField(fieldType);
return new GeoDistanceRangeQuery(point, fromValue, toValue, includeLower, includeUpper, geoDistance, geoFieldType,
indexFieldData, optimizeBbox);
}
@Override
protected void doXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(GeoDistanceRangeQueryParser.NAME);
if (geohash != null) {
builder.field(name, geohash);
} else {
builder.startArray(name).value(lon).value(lat).endArray();
}
builder.field("from", from);
builder.field("to", to);
builder.field("include_lower", includeLower);
builder.field("include_upper", includeUpper);
if (geoDistance != null) {
builder.field("distance_type", geoDistance.name().toLowerCase(Locale.ROOT));
}
if (optimizeBbox != null) {
builder.field("optimize_bbox", optimizeBbox);
}
if (queryName != null) {
builder.field("_name", queryName);
}
if (coerce != null) {
builder.field("coerce", coerce);
}
if (ignoreMalformed != null) {
builder.field("ignore_malformed", ignoreMalformed);
}
builder.startObject(NAME);
builder.startArray(fieldName).value(point.lon()).value(point.lat()).endArray();
builder.field(GeoDistanceRangeQueryParser.FROM_FIELD.getPreferredName(), from);
builder.field(GeoDistanceRangeQueryParser.TO_FIELD.getPreferredName(), to);
builder.field(GeoDistanceRangeQueryParser.INCLUDE_LOWER_FIELD.getPreferredName(), includeLower);
builder.field(GeoDistanceRangeQueryParser.INCLUDE_UPPER_FIELD.getPreferredName(), includeUpper);
builder.field(GeoDistanceRangeQueryParser.UNIT_FIELD.getPreferredName(), unit);
builder.field(GeoDistanceRangeQueryParser.DISTANCE_TYPE_FIELD.getPreferredName(), geoDistance.name().toLowerCase(Locale.ROOT));
builder.field(GeoDistanceRangeQueryParser.OPTIMIZE_BBOX_FIELD.getPreferredName(), optimizeBbox);
builder.field(GeoDistanceRangeQueryParser.VALIDATION_METHOD.getPreferredName(), validationMethod);
printBoostAndQueryName(builder);
builder.endObject();
}
@Override
protected GeoDistanceRangeQueryBuilder doReadFrom(StreamInput in) throws IOException {
GeoDistanceRangeQueryBuilder queryBuilder = new GeoDistanceRangeQueryBuilder(in.readString(), GeoPoint.readGeoPointFrom(in));
queryBuilder.from = in.readGenericValue();
queryBuilder.to = in.readGenericValue();
queryBuilder.includeLower = in.readBoolean();
queryBuilder.includeUpper = in.readBoolean();
queryBuilder.unit = DistanceUnit.valueOf(in.readString());
queryBuilder.geoDistance = GeoDistance.readGeoDistanceFrom(in);
queryBuilder.optimizeBbox = in.readString();
queryBuilder.validationMethod = GeoValidationMethod.readGeoValidationMethodFrom(in);
return queryBuilder;
}
@Override
protected void doWriteTo(StreamOutput out) throws IOException {
out.writeString(fieldName);
point.writeTo(out);
out.writeGenericValue(from);
out.writeGenericValue(to);
out.writeBoolean(includeLower);
out.writeBoolean(includeUpper);
out.writeString(unit.name());
geoDistance.writeTo(out);
out.writeString(optimizeBbox);
validationMethod.writeTo(out);
}
@Override
protected boolean doEquals(GeoDistanceRangeQueryBuilder other) {
return ((Objects.equals(fieldName, other.fieldName)) &&
(Objects.equals(point, other.point)) &&
(Objects.equals(from, other.from)) &&
(Objects.equals(to, other.to)) &&
(Objects.equals(includeUpper, other.includeUpper)) &&
(Objects.equals(includeLower, other.includeLower)) &&
(Objects.equals(geoDistance, other.geoDistance)) &&
(Objects.equals(optimizeBbox, other.optimizeBbox)) &&
(Objects.equals(validationMethod, other.validationMethod)));
}
@Override
protected int doHashCode() {
return Objects.hash(fieldName, point, from, to, includeUpper, includeLower, geoDistance, optimizeBbox, validationMethod);
}
@Override
public String getWriteableName() {
return NAME;
}
}
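A short construction sketch for the rewritten range builder, again restricted to the API shown above (field name and distances are illustrative):

    GeoDistanceRangeQueryBuilder range =
            new GeoDistanceRangeQueryBuilder("pin.location", new GeoPoint(40.0, -70.0)); // hypothetical field
    range.from("200km")           // String overload; the Number overload is converted via unit()
         .to("400km")
         .includeLower(true)
         .includeUpper(false)
         .geoDistance(GeoDistance.DEFAULT)
         .unit(DistanceUnit.DEFAULT)
         .optimizeBbox("memory"); // validated to one of none/memory/indexed

Note that from()/to() only store the raw value; conversion to meters and GeoDistance.normalize() now happen later, in doToQuery(), as shown above.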

View File

@ -19,19 +19,13 @@
package org.elasticsearch.index.query;
import org.apache.lucene.search.Query;
import org.elasticsearch.Version;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.geo.GeoDistance;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.GeoUtils;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
import org.elasticsearch.index.search.geo.GeoDistanceRangeQuery;
import java.io.IOException;
@ -43,71 +37,95 @@ import java.io.IOException;
* }
* </pre>
*/
public class GeoDistanceRangeQueryParser implements QueryParser {
public class GeoDistanceRangeQueryParser implements QueryParser<GeoDistanceRangeQueryBuilder> {
public static final String NAME = "geo_distance_range";
@Inject
public GeoDistanceRangeQueryParser() {
}
public static final ParseField FROM_FIELD = new ParseField("from");
public static final ParseField TO_FIELD = new ParseField("to");
public static final ParseField INCLUDE_LOWER_FIELD = new ParseField("include_lower");
public static final ParseField INCLUDE_UPPER_FIELD = new ParseField("include_upper");
public static final ParseField GT_FIELD = new ParseField("gt");
public static final ParseField GTE_FIELD = new ParseField("gte", "ge");
public static final ParseField LT_FIELD = new ParseField("lt");
public static final ParseField LTE_FIELD = new ParseField("lte", "le");
public static final ParseField UNIT_FIELD = new ParseField("unit");
public static final ParseField DISTANCE_TYPE_FIELD = new ParseField("distance_type");
public static final ParseField NAME_FIELD = new ParseField("_name");
public static final ParseField BOOST_FIELD = new ParseField("boost");
public static final ParseField OPTIMIZE_BBOX_FIELD = new ParseField("optimize_bbox");
public static final ParseField COERCE_FIELD = new ParseField("coerce", "normalize");
public static final ParseField IGNORE_MALFORMED_FIELD = new ParseField("ignore_malformed");
public static final ParseField VALIDATION_METHOD = new ParseField("validation_method");
@Override
public String[] names() {
return new String[]{NAME, "geoDistanceRange"};
return new String[]{GeoDistanceRangeQueryBuilder.NAME, "geoDistanceRange"};
}
@Override
public Query parse(QueryParseContext parseContext) throws IOException, ParsingException {
public GeoDistanceRangeQueryBuilder getBuilderPrototype() {
return GeoDistanceRangeQueryBuilder.PROTOTYPE;
}
@Override
public GeoDistanceRangeQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException {
XContentParser parser = parseContext.parser();
XContentParser.Token token;
Float boost = null;
String queryName = null;
String currentFieldName = null;
GeoPoint point = new GeoPoint();
GeoPoint point = null;
String fieldName = null;
Object vFrom = null;
Object vTo = null;
boolean includeLower = true;
boolean includeUpper = true;
DistanceUnit unit = DistanceUnit.DEFAULT;
GeoDistance geoDistance = GeoDistance.DEFAULT;
String optimizeBbox = "memory";
final boolean indexCreatedBeforeV2_0 = parseContext.indexVersionCreated().before(Version.V_2_0_0);
boolean coerce = false;
boolean ignoreMalformed = false;
Boolean includeLower = null;
Boolean includeUpper = null;
DistanceUnit unit = null;
GeoDistance geoDistance = null;
String optimizeBbox = null;
boolean coerce = GeoValidationMethod.DEFAULT_LENIENT_PARSING;
boolean ignoreMalformed = GeoValidationMethod.DEFAULT_LENIENT_PARSING;
GeoValidationMethod validationMethod = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (parseContext.isDeprecatedSetting(currentFieldName)) {
// skip
} else if (token == XContentParser.Token.START_ARRAY) {
if (point == null) {
point = new GeoPoint();
}
GeoUtils.parseGeoPoint(parser, point);
fieldName = currentFieldName;
} else if (token == XContentParser.Token.START_OBJECT) {
// the json in the format of -> field : { lat : 30, lon : 12 }
fieldName = currentFieldName;
if (point == null) {
point = new GeoPoint();
}
GeoUtils.parseGeoPoint(parser, point);
} else if (token.isValue()) {
if (currentFieldName.equals("from")) {
if (parseContext.parseFieldMatcher().match(currentFieldName, FROM_FIELD)) {
if (token == XContentParser.Token.VALUE_NULL) {
} else if (token == XContentParser.Token.VALUE_STRING) {
vFrom = parser.text(); // a String
} else {
vFrom = parser.numberValue(); // a Number
}
} else if (currentFieldName.equals("to")) {
} else if (parseContext.parseFieldMatcher().match(currentFieldName, TO_FIELD)) {
if (token == XContentParser.Token.VALUE_NULL) {
} else if (token == XContentParser.Token.VALUE_STRING) {
vTo = parser.text(); // a String
} else {
vTo = parser.numberValue(); // a Number
}
} else if ("include_lower".equals(currentFieldName) || "includeLower".equals(currentFieldName)) {
} else if (parseContext.parseFieldMatcher().match(currentFieldName, INCLUDE_LOWER_FIELD)) {
includeLower = parser.booleanValue();
} else if ("include_upper".equals(currentFieldName) || "includeUpper".equals(currentFieldName)) {
} else if (parseContext.parseFieldMatcher().match(currentFieldName, INCLUDE_UPPER_FIELD)) {
includeUpper = parser.booleanValue();
} else if ("gt".equals(currentFieldName)) {
} else if (parseContext.parseFieldMatcher().match(currentFieldName, GT_FIELD)) {
if (token == XContentParser.Token.VALUE_NULL) {
} else if (token == XContentParser.Token.VALUE_STRING) {
vFrom = parser.text(); // a String
@ -115,7 +133,7 @@ public class GeoDistanceRangeQueryParser implements QueryParser {
vFrom = parser.numberValue(); // a Number
}
includeLower = false;
} else if ("gte".equals(currentFieldName) || "ge".equals(currentFieldName)) {
} else if (parseContext.parseFieldMatcher().match(currentFieldName, GTE_FIELD)) {
if (token == XContentParser.Token.VALUE_NULL) {
} else if (token == XContentParser.Token.VALUE_STRING) {
vFrom = parser.text(); // a String
@ -123,7 +141,7 @@ public class GeoDistanceRangeQueryParser implements QueryParser {
vFrom = parser.numberValue(); // a Number
}
includeLower = true;
} else if ("lt".equals(currentFieldName)) {
} else if (parseContext.parseFieldMatcher().match(currentFieldName, LT_FIELD)) {
if (token == XContentParser.Token.VALUE_NULL) {
} else if (token == XContentParser.Token.VALUE_STRING) {
vTo = parser.text(); // a String
@ -131,7 +149,7 @@ public class GeoDistanceRangeQueryParser implements QueryParser {
vTo = parser.numberValue(); // a Number
}
includeUpper = false;
} else if ("lte".equals(currentFieldName) || "le".equals(currentFieldName)) {
} else if (parseContext.parseFieldMatcher().match(currentFieldName, LTE_FIELD)) {
if (token == XContentParser.Token.VALUE_NULL) {
} else if (token == XContentParser.Token.VALUE_STRING) {
vTo = parser.text(); // a String
@ -139,84 +157,98 @@ public class GeoDistanceRangeQueryParser implements QueryParser {
vTo = parser.numberValue(); // a Number
}
includeUpper = true;
} else if (currentFieldName.equals("unit")) {
} else if (parseContext.parseFieldMatcher().match(currentFieldName, UNIT_FIELD)) {
unit = DistanceUnit.fromString(parser.text());
} else if (currentFieldName.equals("distance_type") || currentFieldName.equals("distanceType")) {
} else if (parseContext.parseFieldMatcher().match(currentFieldName, DISTANCE_TYPE_FIELD)) {
geoDistance = GeoDistance.fromString(parser.text());
} else if (currentFieldName.endsWith(GeoPointFieldMapper.Names.LAT_SUFFIX)) {
if (point == null) {
point = new GeoPoint();
}
point.resetLat(parser.doubleValue());
fieldName = currentFieldName.substring(0, currentFieldName.length() - GeoPointFieldMapper.Names.LAT_SUFFIX.length());
} else if (currentFieldName.endsWith(GeoPointFieldMapper.Names.LON_SUFFIX)) {
if (point == null) {
point = new GeoPoint();
}
point.resetLon(parser.doubleValue());
fieldName = currentFieldName.substring(0, currentFieldName.length() - GeoPointFieldMapper.Names.LON_SUFFIX.length());
} else if (currentFieldName.endsWith(GeoPointFieldMapper.Names.GEOHASH_SUFFIX)) {
point.resetFromGeoHash(parser.text());
point = GeoPoint.fromGeohash(parser.text());
fieldName = currentFieldName.substring(0, currentFieldName.length() - GeoPointFieldMapper.Names.GEOHASH_SUFFIX.length());
} else if ("_name".equals(currentFieldName)) {
} else if (parseContext.parseFieldMatcher().match(currentFieldName, NAME_FIELD)) {
queryName = parser.text();
} else if ("optimize_bbox".equals(currentFieldName) || "optimizeBbox".equals(currentFieldName)) {
} else if (parseContext.parseFieldMatcher().match(currentFieldName, BOOST_FIELD)) {
boost = parser.floatValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, OPTIMIZE_BBOX_FIELD)) {
optimizeBbox = parser.textOrNull();
} else if ("coerce".equals(currentFieldName) || (indexCreatedBeforeV2_0 && "normalize".equals(currentFieldName))) {
} else if (parseContext.parseFieldMatcher().match(currentFieldName, COERCE_FIELD)) {
coerce = parser.booleanValue();
if (coerce == true) {
ignoreMalformed = true;
}
} else if ("ignore_malformed".equals(currentFieldName) && coerce == false) {
} else if (parseContext.parseFieldMatcher().match(currentFieldName, IGNORE_MALFORMED_FIELD)) {
ignoreMalformed = parser.booleanValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, VALIDATION_METHOD)) {
validationMethod = GeoValidationMethod.fromString(parser.text());
} else {
if (point == null) {
point = new GeoPoint();
}
point.resetFromString(parser.text());
fieldName = currentFieldName;
}
}
}
// validation was not available prior to 2.x, so to support bwc percolation queries we only ignore_malformed on 2.x created indexes
if (!indexCreatedBeforeV2_0 && !ignoreMalformed) {
if (point.lat() > 90.0 || point.lat() < -90.0) {
throw new ParsingException(parseContext, "illegal latitude value [{}] for [{}]", point.lat(), NAME);
}
if (point.lon() > 180.0 || point.lon() < -180) {
throw new ParsingException(parseContext, "illegal longitude value [{}] for [{}]", point.lon(), NAME);
}
GeoDistanceRangeQueryBuilder queryBuilder = new GeoDistanceRangeQueryBuilder(fieldName, point);
if (boost != null) {
queryBuilder.boost(boost);
}
if (coerce) {
GeoUtils.normalizePoint(point, coerce, coerce);
if (queryName != null) {
queryBuilder.queryName(queryName);
}
Double from = null;
Double to = null;
if (vFrom != null) {
if (vFrom instanceof Number) {
from = unit.toMeters(((Number) vFrom).doubleValue());
queryBuilder.from((Number) vFrom);
} else {
from = DistanceUnit.parse((String) vFrom, unit, DistanceUnit.DEFAULT);
queryBuilder.from((String) vFrom);
}
from = geoDistance.normalize(from, DistanceUnit.DEFAULT);
}
if (vTo != null) {
if (vTo instanceof Number) {
to = unit.toMeters(((Number) vTo).doubleValue());
queryBuilder.to((Number) vTo);
} else {
to = DistanceUnit.parse((String) vTo, unit, DistanceUnit.DEFAULT);
queryBuilder.to((String) vTo);
}
to = geoDistance.normalize(to, DistanceUnit.DEFAULT);
}
MappedFieldType fieldType = parseContext.fieldMapper(fieldName);
if (fieldType == null) {
throw new ParsingException(parseContext, "failed to find geo_point field [" + fieldName + "]");
if (includeUpper != null) {
queryBuilder.includeUpper(includeUpper);
}
if (!(fieldType instanceof GeoPointFieldMapper.GeoPointFieldType)) {
throw new ParsingException(parseContext, "field [" + fieldName + "] is not a geo_point field");
}
GeoPointFieldMapper.GeoPointFieldType geoFieldType = ((GeoPointFieldMapper.GeoPointFieldType) fieldType);
IndexGeoPointFieldData indexFieldData = parseContext.getForField(fieldType);
Query query = new GeoDistanceRangeQuery(point, from, to, includeLower, includeUpper, geoDistance, geoFieldType, indexFieldData, optimizeBbox);
if (queryName != null) {
parseContext.addNamedQuery(queryName, query);
if (includeLower != null) {
queryBuilder.includeLower(includeLower);
}
return query;
if (unit != null) {
queryBuilder.unit(unit);
}
if (geoDistance != null) {
queryBuilder.geoDistance(geoDistance);
}
if (optimizeBbox != null) {
queryBuilder.optimizeBbox(optimizeBbox);
}
if (validationMethod != null) {
// if validation method is set explicitly ignore deprecated coerce/ignore malformed fields if any
queryBuilder.setValidationMethod(validationMethod);
} else {
queryBuilder.setValidationMethod(GeoValidationMethod.infer(coerce, ignoreMalformed));
}
return queryBuilder;
}
}

View File

@ -0,0 +1,73 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.query;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import java.io.IOException;
/** Specifies how a geo query should be run. */
public enum GeoExecType implements Writeable<GeoExecType> {
MEMORY(0), INDEXED(1);
private final int ordinal;
private static final GeoExecType PROTOTYPE = MEMORY;
GeoExecType(int ordinal) {
this.ordinal = ordinal;
}
@Override
public GeoExecType readFrom(StreamInput in) throws IOException {
int ord = in.readVInt();
switch(ord) {
case(0): return MEMORY;
case(1): return INDEXED;
}
throw new ElasticsearchException("unknown serialized type [" + ord + "]");
}
public static GeoExecType readTypeFrom(StreamInput in) throws IOException {
return PROTOTYPE.readFrom(in);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeVInt(this.ordinal);
}
public static GeoExecType fromString(String typeName) {
if (typeName == null) {
throw new IllegalArgumentException("cannot parse type from null string");
}
for (GeoExecType type : GeoExecType.values()) {
if (type.name().equalsIgnoreCase(typeName)) {
return type;
}
}
throw new IllegalArgumentException("no type can be parsed from ordinal " + typeName);
}
}
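A tiny sketch of the enum's two conversion paths, grounded in the methods above:

    GeoExecType mem = GeoExecType.fromString("memory");  // name matching is case-insensitive
    GeoExecType idx = GeoExecType.fromString("INDEXED"); // -> INDEXED
    // on the wire, writeTo() emits the ordinal as a vInt and
    // readTypeFrom() maps it back: 0 -> MEMORY, 1 -> INDEXED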

View File

@ -19,90 +19,178 @@
package org.elasticsearch.index.query;
import org.apache.lucene.search.Query;
import org.elasticsearch.Version;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.GeoUtils;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
import org.elasticsearch.index.search.geo.GeoPolygonQuery;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
public class GeoPolygonQueryBuilder extends QueryBuilder {
public class GeoPolygonQueryBuilder extends AbstractQueryBuilder<GeoPolygonQueryBuilder> {
public static final String POINTS = GeoPolygonQueryParser.POINTS;
private final String name;
public static final String NAME = "geo_polygon";
private final List<GeoPoint> shell = new ArrayList<>();
private static final List<GeoPoint> PROTO_SHAPE = Arrays.asList(new GeoPoint[] { new GeoPoint(1.0, 1.0), new GeoPoint(1.0, 2.0),
new GeoPoint(2.0, 1.0) });
private String queryName;
static final GeoPolygonQueryBuilder PROTOTYPE = new GeoPolygonQueryBuilder("field", PROTO_SHAPE);
private Boolean coerce;
private final String fieldName;
private Boolean ignoreMalformed;
private final List<GeoPoint> shell;
public GeoPolygonQueryBuilder(String name) {
this.name = name;
private GeoValidationMethod validationMethod = GeoValidationMethod.DEFAULT;
public GeoPolygonQueryBuilder(String fieldName, List<GeoPoint> points) {
if (Strings.isEmpty(fieldName)) {
throw new IllegalArgumentException("fieldName must not be null");
}
if (points == null || points.isEmpty()) {
throw new IllegalArgumentException("polygon must not be null or empty");
} else {
GeoPoint start = points.get(0);
if (start.equals(points.get(points.size() - 1))) {
if (points.size() < 4) {
throw new IllegalArgumentException("too few points defined for geo_polygon query");
}
} else {
if (points.size() < 3) {
throw new IllegalArgumentException("too few points defined for geo_polygon query");
}
}
}
this.fieldName = fieldName;
this.shell = points;
}
/**
* Adds a point with lat and lon
*
* @param lat The latitude
* @param lon The longitude
*/
public GeoPolygonQueryBuilder addPoint(double lat, double lon) {
return addPoint(new GeoPoint(lat, lon));
public String fieldName() {
return fieldName;
}
public GeoPolygonQueryBuilder addPoint(String geohash) {
return addPoint(GeoPoint.fromGeohash(geohash));
public List<GeoPoint> points() {
return shell;
}
public GeoPolygonQueryBuilder addPoint(GeoPoint point) {
shell.add(point);
return this;
}
/**
* Sets the filter name for the filter that can be used when searching for matched_filters per hit.
*/
public GeoPolygonQueryBuilder queryName(String queryName) {
this.queryName = queryName;
/** Sets the validation method to use for geo coordinates. */
public GeoPolygonQueryBuilder setValidationMethod(GeoValidationMethod method) {
this.validationMethod = method;
return this;
}
public GeoPolygonQueryBuilder coerce(boolean coerce) {
this.coerce = coerce;
return this;
/** Returns the validation method to use for geo coordinates. */
public GeoValidationMethod getValidationMethod() {
return this.validationMethod;
}
public GeoPolygonQueryBuilder ignoreMalformed(boolean ignoreMalformed) {
this.ignoreMalformed = ignoreMalformed;
return this;
@Override
protected Query doToQuery(QueryShardContext context) throws IOException {
if (!shell.get(shell.size() - 1).equals(shell.get(0))) {
shell.add(shell.get(0));
}
final boolean indexCreatedBeforeV2_0 = context.indexVersionCreated().before(Version.V_2_0_0);
// validation was not available prior to 2.x, so to support bwc
// percolation queries we only ignore_malformed on 2.x created indexes
if (!indexCreatedBeforeV2_0 && !GeoValidationMethod.isIgnoreMalformed(validationMethod)) {
for (GeoPoint point : shell) {
if (!GeoUtils.isValidLatitude(point.lat())) {
throw new QueryShardException(context, "illegal latitude value [{}] for [{}]", point.lat(),
GeoPolygonQueryBuilder.NAME);
}
if (!GeoUtils.isValidLongitude(point.lon())) {
throw new QueryShardException(context, "illegal longitude value [{}] for [{}]", point.lon(),
GeoPolygonQueryBuilder.NAME);
}
}
}
if (GeoValidationMethod.isCoerce(validationMethod)) {
for (GeoPoint point : shell) {
GeoUtils.normalizePoint(point, true, true);
}
}
MappedFieldType fieldType = context.fieldMapper(fieldName);
if (fieldType == null) {
throw new QueryShardException(context, "failed to find geo_point field [" + fieldName + "]");
}
if (!(fieldType instanceof GeoPointFieldMapper.GeoPointFieldType)) {
throw new QueryShardException(context, "field [" + fieldName + "] is not a geo_point field");
}
IndexGeoPointFieldData indexFieldData = context.getForField(fieldType);
return new GeoPolygonQuery(indexFieldData, shell.toArray(new GeoPoint[shell.size()]));
}
@Override
protected void doXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(GeoPolygonQueryParser.NAME);
builder.startObject(NAME);
builder.startObject(name);
builder.startArray(POINTS);
builder.startObject(fieldName);
builder.startArray(GeoPolygonQueryParser.POINTS_FIELD.getPreferredName());
for (GeoPoint point : shell) {
builder.startArray().value(point.lon()).value(point.lat()).endArray();
}
builder.endArray();
builder.endObject();
if (queryName != null) {
builder.field("_name", queryName);
}
if (coerce != null) {
builder.field("coerce", coerce);
}
if (ignoreMalformed != null) {
builder.field("ignore_malformed", ignoreMalformed);
}
builder.field(GeoPolygonQueryParser.COERCE_FIELD.getPreferredName(), GeoValidationMethod.isCoerce(validationMethod));
builder.field(GeoPolygonQueryParser.IGNORE_MALFORMED_FIELD.getPreferredName(), GeoValidationMethod.isIgnoreMalformed(validationMethod));
printBoostAndQueryName(builder);
builder.endObject();
}
@Override
protected GeoPolygonQueryBuilder doReadFrom(StreamInput in) throws IOException {
String fieldName = in.readString();
List<GeoPoint> shell = new ArrayList<>();
int size = in.readVInt();
for (int i = 0; i < size; i++) {
shell.add(GeoPoint.readGeoPointFrom(in));
}
GeoPolygonQueryBuilder builder = new GeoPolygonQueryBuilder(fieldName, shell);
builder.validationMethod = GeoValidationMethod.readGeoValidationMethodFrom(in);
return builder;
}
@Override
protected void doWriteTo(StreamOutput out) throws IOException {
out.writeString(fieldName);
out.writeVInt(shell.size());
for (GeoPoint point : shell) {
point.writeTo(out);
}
validationMethod.writeTo(out);
}
@Override
protected boolean doEquals(GeoPolygonQueryBuilder other) {
return Objects.equals(validationMethod, other.validationMethod)
&& Objects.equals(fieldName, other.fieldName)
&& Objects.equals(shell, other.shell);
}
@Override
protected int doHashCode() {
return Objects.hash(validationMethod, fieldName, shell);
}
@Override
public String getWriteableName() {
return NAME;
}
}
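A construction sketch under the new invariants (at least three distinct points; an unclosed ring is closed automatically in doToQuery()); the field name is illustrative:

    List<GeoPoint> ring = new ArrayList<>();
    ring.add(new GeoPoint(40.0, -70.0));
    ring.add(new GeoPoint(30.0, -80.0));
    ring.add(new GeoPoint(20.0, -90.0)); // ring closed by doToQuery() if the last point != first
    GeoPolygonQueryBuilder polygon = new GeoPolygonQueryBuilder("pin.location", ring);
    polygon.setValidationMethod(GeoValidationMethod.DEFAULT);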

View File

@ -19,18 +19,12 @@
package org.elasticsearch.index.query;
import org.apache.lucene.search.Query;
import org.elasticsearch.Version;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.GeoUtils;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
import org.elasticsearch.index.search.geo.GeoPolygonQuery;
import java.io.IOException;
import java.util.ArrayList;
@ -48,31 +42,30 @@ import java.util.List;
* }
* </pre>
*/
public class GeoPolygonQueryParser implements QueryParser {
public class GeoPolygonQueryParser implements QueryParser<GeoPolygonQueryBuilder> {
public static final String NAME = "geo_polygon";
public static final String POINTS = "points";
@Inject
public GeoPolygonQueryParser() {
}
public static final ParseField COERCE_FIELD = new ParseField("coerce", "normalize");
public static final ParseField IGNORE_MALFORMED_FIELD = new ParseField("ignore_malformed");
public static final ParseField VALIDATION_METHOD = new ParseField("validation_method");
public static final ParseField POINTS_FIELD = new ParseField("points");
@Override
public String[] names() {
return new String[]{NAME, "geoPolygon"};
return new String[]{GeoPolygonQueryBuilder.NAME, "geoPolygon"};
}
@Override
public Query parse(QueryParseContext parseContext) throws IOException, ParsingException {
public GeoPolygonQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException {
XContentParser parser = parseContext.parser();
String fieldName = null;
List<GeoPoint> shell = new ArrayList<>();
List<GeoPoint> shell = null;
final boolean indexCreatedBeforeV2_0 = parseContext.indexVersionCreated().before(Version.V_2_0_0);
boolean coerce = false;
boolean ignoreMalformed = false;
Float boost = null;
boolean coerce = GeoValidationMethod.DEFAULT_LENIENT_PARSING;
boolean ignoreMalformed = GeoValidationMethod.DEFAULT_LENIENT_PARSING;
GeoValidationMethod validationMethod = null;
String queryName = null;
String currentFieldName = null;
XContentParser.Token token;
@ -89,86 +82,60 @@ public class GeoPolygonQueryParser implements QueryParser {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_ARRAY) {
if (POINTS.equals(currentFieldName)) {
if (parseContext.parseFieldMatcher().match(currentFieldName, POINTS_FIELD)) {
shell = new ArrayList<GeoPoint>();
while ((token = parser.nextToken()) != Token.END_ARRAY) {
shell.add(GeoUtils.parseGeoPoint(parser));
}
if (!shell.get(shell.size()-1).equals(shell.get(0))) {
shell.add(shell.get(0));
}
} else {
throw new ParsingException(parseContext, "[geo_polygon] query does not support [" + currentFieldName
throw new ParsingException(parser.getTokenLocation(), "[geo_polygon] query does not support [" + currentFieldName
+ "]");
}
} else {
throw new ParsingException(parseContext, "[geo_polygon] query does not support token type [" + token.name()
throw new ParsingException(parser.getTokenLocation(), "[geo_polygon] query does not support token type [" + token.name()
+ "] under [" + currentFieldName + "]");
}
}
} else if (token.isValue()) {
if ("_name".equals(currentFieldName)) {
queryName = parser.text();
} else if ("coerce".equals(currentFieldName) || (indexCreatedBeforeV2_0 && "normalize".equals(currentFieldName))) {
} else if ("boost".equals(currentFieldName)) {
boost = parser.floatValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, COERCE_FIELD)) {
coerce = parser.booleanValue();
if (coerce == true) {
ignoreMalformed = true;
}
} else if ("ignore_malformed".equals(currentFieldName) && coerce == false) {
} else if (parseContext.parseFieldMatcher().match(currentFieldName, IGNORE_MALFORMED_FIELD)) {
ignoreMalformed = parser.booleanValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, VALIDATION_METHOD)) {
validationMethod = GeoValidationMethod.fromString(parser.text());
} else {
throw new ParsingException(parseContext, "[geo_polygon] query does not support [" + currentFieldName + "]");
throw new ParsingException(parser.getTokenLocation(), "[geo_polygon] query does not support [" + currentFieldName + "]");
}
} else {
throw new ParsingException(parseContext, "[geo_polygon] unexpected token type [" + token.name() + "]");
throw new ParsingException(parser.getTokenLocation(), "[geo_polygon] unexpected token type [" + token.name() + "]");
}
}
if (shell.isEmpty()) {
throw new ParsingException(parseContext, "no points defined for geo_polygon query");
GeoPolygonQueryBuilder builder = new GeoPolygonQueryBuilder(fieldName, shell);
if (validationMethod != null) {
// if GeoValidationMethod was explicitly set ignore deprecated coerce and ignoreMalformed settings
builder.setValidationMethod(validationMethod);
} else {
if (shell.size() < 3) {
throw new ParsingException(parseContext, "too few points defined for geo_polygon query");
}
GeoPoint start = shell.get(0);
if (!start.equals(shell.get(shell.size() - 1))) {
shell.add(start);
}
if (shell.size() < 4) {
throw new ParsingException(parseContext, "too few points defined for geo_polygon query");
}
builder.setValidationMethod(GeoValidationMethod.infer(coerce, ignoreMalformed));
}
// validation was not available prior to 2.x, so to support bwc percolation queries we only ignore_malformed on 2.x created indexes
if (!indexCreatedBeforeV2_0 && !ignoreMalformed) {
for (GeoPoint point : shell) {
if (point.lat() > 90.0 || point.lat() < -90.0) {
throw new ParsingException(parseContext, "illegal latitude value [{}] for [{}]", point.lat(), NAME);
}
if (point.lon() > 180.0 || point.lon() < -180) {
throw new ParsingException(parseContext, "illegal longitude value [{}] for [{}]", point.lon(), NAME);
}
}
}
if (coerce) {
for (GeoPoint point : shell) {
GeoUtils.normalizePoint(point, coerce, coerce);
}
}
MappedFieldType fieldType = parseContext.fieldMapper(fieldName);
if (fieldType == null) {
throw new ParsingException(parseContext, "failed to find geo_point field [" + fieldName + "]");
}
if (!(fieldType instanceof GeoPointFieldMapper.GeoPointFieldType)) {
throw new ParsingException(parseContext, "field [" + fieldName + "] is not a geo_point field");
}
IndexGeoPointFieldData indexFieldData = parseContext.getForField(fieldType);
Query query = new GeoPolygonQuery(indexFieldData, shell.toArray(new GeoPoint[shell.size()]));
if (queryName != null) {
parseContext.addNamedQuery(queryName, query);
builder.queryName(queryName);
}
return query;
if (boost != null) {
builder.boost(boost);
}
return builder;
}
@Override
public GeoPolygonQueryBuilder getBuilderPrototype() {
return GeoPolygonQueryBuilder.PROTOTYPE;
}
}

View File

@ -19,100 +19,182 @@
package org.elasticsearch.index.query;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.Query;
import org.apache.lucene.spatial.prefix.PrefixTreeStrategy;
import org.apache.lucene.spatial.prefix.RecursivePrefixTreeStrategy;
import org.apache.lucene.spatial.query.SpatialArgs;
import org.apache.lucene.spatial.query.SpatialOperation;
import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.geo.ShapeRelation;
import org.elasticsearch.common.geo.ShapesAvailability;
import org.elasticsearch.common.geo.SpatialStrategy;
import org.elasticsearch.common.geo.builders.ShapeBuilder;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.geo.GeoShapeFieldMapper;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
import java.util.Objects;
/**
* {@link QueryBuilder} that builds a GeoShape Filter
* {@link QueryBuilder} that builds a GeoShape Query
*/
public class GeoShapeQueryBuilder extends QueryBuilder implements BoostableQueryBuilder<GeoShapeQueryBuilder> {
public class GeoShapeQueryBuilder extends AbstractQueryBuilder<GeoShapeQueryBuilder> {
private final String name;
public static final String NAME = "geo_shape";
public static final String DEFAULT_SHAPE_INDEX_NAME = "shapes";
public static final String DEFAULT_SHAPE_FIELD_NAME = "shape";
public static final ShapeRelation DEFAULT_SHAPE_RELATION = ShapeRelation.INTERSECTS;
private final ShapeBuilder shape;
static final GeoShapeQueryBuilder PROTOTYPE = new GeoShapeQueryBuilder("field", new BytesArray(new byte[1]));
private final String fieldName;
// TODO make the ShapeBuilder and subclasses Writable and implement hashCode
// and Equals so ShapeBuilder can be used here
private BytesReference shapeBytes;
private SpatialStrategy strategy = null;
private String queryName;
private final String indexedShapeId;
private final String indexedShapeType;
private String indexedShapeIndex;
private String indexedShapePath;
private String indexedShapeIndex = DEFAULT_SHAPE_INDEX_NAME;
private String indexedShapePath = DEFAULT_SHAPE_FIELD_NAME;
private ShapeRelation relation = null;
private ShapeRelation relation = DEFAULT_SHAPE_RELATION;
private float boost = -1;
/**
* Creates a new GeoShapeQueryBuilder whose Filter will be against the
* given field name using the given Shape
* Creates a new GeoShapeQueryBuilder whose Query will be against the given
* field name using the given Shape
*
* @param name Name of the field that will be filtered
* @param shape Shape used in the filter
* @param fieldName
* Name of the field that will be queried
* @param shape
* Shape used in the Query
*/
public GeoShapeQueryBuilder(String name, ShapeBuilder shape) {
this(name, shape, null, null, null);
public GeoShapeQueryBuilder(String fieldName, ShapeBuilder shape) throws IOException {
this(fieldName, shape, null, null);
}
/**
* Creates a new GeoShapeQueryBuilder whose Filter will be against the
* given field name using the given Shape
* Creates a new GeoShapeQueryBuilder whose Query will be against the given
* field name and will use the Shape found with the given ID in the given
* type
*
* @param name Name of the field that will be filtered
* @param relation {@link ShapeRelation} of query and indexed shape
* @param shape Shape used in the filter
* @param fieldName
* Name of the field that will be filtered
* @param indexedShapeId
* ID of the indexed Shape that will be used in the Query
* @param indexedShapeType
* Index type of the indexed Shapes
*/
public GeoShapeQueryBuilder(String name, ShapeBuilder shape, ShapeRelation relation) {
this(name, shape, null, null, relation);
public GeoShapeQueryBuilder(String fieldName, String indexedShapeId, String indexedShapeType) {
this(fieldName, (BytesReference) null, indexedShapeId, indexedShapeType);
}
/**
* Creates a new GeoShapeQueryBuilder whose Filter will be against the given field name
* and will use the Shape found with the given ID in the given type
*
* @param name Name of the field that will be filtered
* @param indexedShapeId ID of the indexed Shape that will be used in the Filter
* @param indexedShapeType Index type of the indexed Shapes
*/
public GeoShapeQueryBuilder(String name, String indexedShapeId, String indexedShapeType, ShapeRelation relation) {
this(name, null, indexedShapeId, indexedShapeType, relation);
GeoShapeQueryBuilder(String fieldName, BytesReference shapeBytes) {
this(fieldName, shapeBytes, null, null);
}
private GeoShapeQueryBuilder(String name, ShapeBuilder shape, String indexedShapeId, String indexedShapeType, ShapeRelation relation) {
this.name = name;
this.shape = shape;
private GeoShapeQueryBuilder(String fieldName, ShapeBuilder shape, String indexedShapeId, String indexedShapeType) throws IOException {
this(fieldName, new BytesArray(new byte[1]), indexedShapeId, indexedShapeType);
if (shape != null) {
XContentBuilder builder = XContentFactory.jsonBuilder();
shape.toXContent(builder, EMPTY_PARAMS);
this.shapeBytes = shape.buildAsBytes(XContentType.JSON);
if (this.shapeBytes.length() == 0) {
throw new IllegalArgumentException("shape must not be empty");
}
} else {
throw new IllegalArgumentException("shape must not be null");
}
}
private GeoShapeQueryBuilder(String fieldName, BytesReference shapeBytes, String indexedShapeId, String indexedShapeType) {
if (fieldName == null) {
throw new IllegalArgumentException("fieldName is required");
}
if ((shapeBytes == null || shapeBytes.length() == 0) && indexedShapeId == null) {
throw new IllegalArgumentException("either shapeBytes or indexedShapeId and indexedShapeType are required");
}
if (indexedShapeId != null && indexedShapeType == null) {
throw new IllegalArgumentException("indexedShapeType is required if indexedShapeId is specified");
}
this.fieldName = fieldName;
this.shapeBytes = shapeBytes;
this.indexedShapeId = indexedShapeId;
this.relation = relation;
this.indexedShapeType = indexedShapeType;
}
/**
* Sets the name of the filter
* @return the name of the field that will be queried
*/
public String fieldName() {
return fieldName;
}
/**
* @return the JSON bytes for the shape used in the Query
*/
public BytesReference shapeBytes() {
return shapeBytes;
}
/**
* @return the ID of the indexed Shape that will be used in the Query
*/
public String indexedShapeId() {
return indexedShapeId;
}
/**
* @return the document type of the indexed Shape that will be used in the
* Query
*/
public String indexedShapeType() {
return indexedShapeType;
}
/**
* Defines which spatial strategy will be used for building the geo shape
* Query. When not set, the strategy that will be used will be the one that
* is associated with the geo shape field in the mappings.
*
* @param queryName Name of the filter
* @param strategy
* The spatial strategy to use for building the geo shape Query
* @return this
*/
public GeoShapeQueryBuilder queryName(String queryName) {
this.queryName = queryName;
public GeoShapeQueryBuilder strategy(SpatialStrategy strategy) {
if (strategy != null && strategy == SpatialStrategy.TERM && relation != ShapeRelation.INTERSECTS) {
throw new IllegalArgumentException("strategy [" + strategy.getStrategyName() + "] only supports relation ["
+ ShapeRelation.INTERSECTS.getRelationName() + "] found relation [" + relation.getRelationName() + "]");
}
this.strategy = strategy;
return this;
}
/**
* Defines which spatial strategy will be used for building the geo shape filter. When not set, the strategy that
* will be used will be the one that is associated with the geo shape field in the mappings.
*
* @param strategy The spatial strategy to use for building the geo shape filter
* @return this
* @return The spatial strategy to use for building the geo shape Query
*/
public GeoShapeQueryBuilder strategy(SpatialStrategy strategy) {
this.strategy = strategy;
return this;
public SpatialStrategy strategy() {
return strategy;
}
/**
@ -126,6 +208,14 @@ public class GeoShapeQueryBuilder extends QueryBuilder implements BoostableQuery
return this;
}
/**
* @return the index name for the indexed Shape that will be used in the
* Query
*/
public String indexedShapeIndex() {
return indexedShapeIndex;
}
/**
* Sets the path of the field in the indexed Shape document that has the Shape itself
*
@ -137,6 +227,13 @@ public class GeoShapeQueryBuilder extends QueryBuilder implements BoostableQuery
return this;
}
/**
* @return the path of the indexed Shape that will be used in the Query
*/
public String indexedShapePath() {
return indexedShapePath;
}
/**
* Sets the relation of query shape and indexed shape.
*
@ -144,55 +241,235 @@ public class GeoShapeQueryBuilder extends QueryBuilder implements BoostableQuery
* @return this
*/
public GeoShapeQueryBuilder relation(ShapeRelation relation) {
if (relation == null) {
throw new IllegalArgumentException("No Shape Relation defined");
}
if (strategy != null && strategy == SpatialStrategy.TERM && relation != ShapeRelation.INTERSECTS) {
throw new IllegalArgumentException("current strategy [" + strategy.getStrategyName() + "] only supports relation ["
+ ShapeRelation.INTERSECTS.getRelationName() + "] found relation [" + relation.getRelationName() + "]");
}
this.relation = relation;
return this;
}
/**
* @return the relation of query shape and indexed shape to use in the Query
*/
public ShapeRelation relation() {
return relation;
}
@Override
public GeoShapeQueryBuilder boost(float boost) {
this.boost = boost;
return this;
protected Query doToQuery(QueryShardContext context) throws IOException {
ShapeBuilder shape;
if (shapeBytes == null) {
GetRequest getRequest = new GetRequest(indexedShapeIndex, indexedShapeType, indexedShapeId);
getRequest.copyContextAndHeadersFrom(SearchContext.current());
shape = fetch(context.getClient(), getRequest, indexedShapePath);
} else {
XContentParser shapeParser = XContentHelper.createParser(shapeBytes);
shapeParser.nextToken();
shape = ShapeBuilder.parse(shapeParser);
}
MappedFieldType fieldType = context.fieldMapper(fieldName);
if (fieldType == null) {
throw new QueryShardException(context, "Failed to find geo_shape field [" + fieldName + "]");
}
// TODO: This isn't the nicest way to check this
if (!(fieldType instanceof GeoShapeFieldMapper.GeoShapeFieldType)) {
throw new QueryShardException(context, "Field [" + fieldName + "] is not a geo_shape");
}
GeoShapeFieldMapper.GeoShapeFieldType shapeFieldType = (GeoShapeFieldMapper.GeoShapeFieldType) fieldType;
PrefixTreeStrategy strategy = shapeFieldType.defaultStrategy();
if (this.strategy != null) {
strategy = shapeFieldType.resolveStrategy(this.strategy);
}
Query query;
if (strategy instanceof RecursivePrefixTreeStrategy && relation == ShapeRelation.DISJOINT) {
// this strategy doesn't support disjoint anymore: but it did
// before, including creating lucene fieldcache (!)
// in this case, execute disjoint as exists && !intersects
BooleanQuery.Builder bool = new BooleanQuery.Builder();
Query exists = ExistsQueryBuilder.newFilter(context, fieldName);
Filter intersects = strategy.makeFilter(getArgs(shape, ShapeRelation.INTERSECTS));
bool.add(exists, BooleanClause.Occur.MUST);
bool.add(intersects, BooleanClause.Occur.MUST_NOT);
query = new ConstantScoreQuery(bool.build());
} else {
query = strategy.makeQuery(getArgs(shape, relation));
}
return query;
}
/**
* Fetches the Shape with the given ID in the given type and index.
*
* @param getRequest
* GetRequest containing index, type and id
* @param path
* Name or path of the field in the Shape Document where the
* Shape itself is located
* @return Shape with the given ID
* @throws IOException
* Can be thrown while parsing the Shape Document and extracting
* the Shape
*/
private ShapeBuilder fetch(Client client, GetRequest getRequest, String path) throws IOException {
if (ShapesAvailability.JTS_AVAILABLE == false) {
throw new IllegalStateException("JTS not available");
}
getRequest.preference("_local");
getRequest.operationThreaded(false);
GetResponse response = client.get(getRequest).actionGet();
if (!response.isExists()) {
throw new IllegalArgumentException("Shape with ID [" + getRequest.id() + "] in type [" + getRequest.type() + "] not found");
}
String[] pathElements = Strings.splitStringToArray(path, '.');
int currentPathSlot = 0;
XContentParser parser = null;
try {
parser = XContentHelper.createParser(response.getSourceAsBytesRef());
XContentParser.Token currentToken;
while ((currentToken = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (currentToken == XContentParser.Token.FIELD_NAME) {
if (pathElements[currentPathSlot].equals(parser.currentName())) {
parser.nextToken();
if (++currentPathSlot == pathElements.length) {
return ShapeBuilder.parse(parser);
}
} else {
parser.nextToken();
parser.skipChildren();
}
}
}
throw new IllegalStateException("Shape with name [" + getRequest.id() + "] found but missing " + path + " field");
} finally {
if (parser != null) {
parser.close();
}
}
}
public static SpatialArgs getArgs(ShapeBuilder shape, ShapeRelation relation) {
switch (relation) {
case DISJOINT:
return new SpatialArgs(SpatialOperation.IsDisjointTo, shape.build());
case INTERSECTS:
return new SpatialArgs(SpatialOperation.Intersects, shape.build());
case WITHIN:
return new SpatialArgs(SpatialOperation.IsWithin, shape.build());
default:
throw new IllegalArgumentException("invalid relation [" + relation + "]");
}
}
@Override
protected void doXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(NAME);
builder.startObject(fieldName);
if (strategy != null) {
    builder.field(GeoShapeQueryParser.STRATEGY_FIELD.getPreferredName(), strategy.getStrategyName());
}
if (shapeBytes != null) {
    builder.field(GeoShapeQueryParser.SHAPE_FIELD.getPreferredName());
    XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(shapeBytes);
    parser.nextToken();
    builder.copyCurrentStructure(parser);
} else {
    builder.startObject(GeoShapeQueryParser.INDEXED_SHAPE_FIELD.getPreferredName())
            .field(GeoShapeQueryParser.SHAPE_ID_FIELD.getPreferredName(), indexedShapeId)
            .field(GeoShapeQueryParser.SHAPE_TYPE_FIELD.getPreferredName(), indexedShapeType);
    if (indexedShapeIndex != null) {
        builder.field(GeoShapeQueryParser.SHAPE_INDEX_FIELD.getPreferredName(), indexedShapeIndex);
    }
    if (indexedShapePath != null) {
        builder.field(GeoShapeQueryParser.SHAPE_PATH_FIELD.getPreferredName(), indexedShapePath);
    }
    builder.endObject();
}
if (relation != null) {
    builder.field(GeoShapeQueryParser.RELATION_FIELD.getPreferredName(), relation.getRelationName());
}
builder.endObject();
printBoostAndQueryName(builder);
builder.endObject();
}
@Override
protected GeoShapeQueryBuilder doReadFrom(StreamInput in) throws IOException {
String fieldName = in.readString();
GeoShapeQueryBuilder builder;
if (in.readBoolean()) {
BytesReference shapeBytes = in.readBytesReference();
builder = new GeoShapeQueryBuilder(fieldName, shapeBytes);
} else {
String indexedShapeId = in.readOptionalString();
String indexedShapeType = in.readOptionalString();
String indexedShapeIndex = in.readOptionalString();
String indexedShapePath = in.readOptionalString();
builder = new GeoShapeQueryBuilder(fieldName, indexedShapeId, indexedShapeType);
if (indexedShapeIndex != null) {
builder.indexedShapeIndex = indexedShapeIndex;
}
if (indexedShapePath != null) {
builder.indexedShapePath = indexedShapePath;
}
}
builder.relation = ShapeRelation.DISJOINT.readFrom(in);
builder.strategy = SpatialStrategy.RECURSIVE.readFrom(in);
return builder;
}
@Override
protected void doWriteTo(StreamOutput out) throws IOException {
out.writeString(fieldName);
boolean hasShapeBytes = shapeBytes != null;
out.writeBoolean(hasShapeBytes);
if (hasShapeBytes) {
out.writeBytesReference(shapeBytes);
} else {
out.writeOptionalString(indexedShapeId);
out.writeOptionalString(indexedShapeType);
out.writeOptionalString(indexedShapeIndex);
out.writeOptionalString(indexedShapePath);
}
relation.writeTo(out);
strategy.writeTo(out);
}
@Override
protected boolean doEquals(GeoShapeQueryBuilder other) {
return Objects.equals(fieldName, other.fieldName)
&& Objects.equals(indexedShapeId, other.indexedShapeId)
&& Objects.equals(indexedShapeIndex, other.indexedShapeIndex)
&& Objects.equals(indexedShapePath, other.indexedShapePath)
&& Objects.equals(indexedShapeType, other.indexedShapeType)
&& Objects.equals(relation, other.relation)
&& Objects.equals(shapeBytes, other.shapeBytes)
&& Objects.equals(strategy, other.strategy);
}
@Override
protected int doHashCode() {
return Objects.hash(fieldName, indexedShapeId, indexedShapeIndex,
indexedShapePath, indexedShapeType, relation, shapeBytes, strategy);
}
@Override
public String getWriteableName() {
return NAME;
}
}
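Taken together, the refactored builder now carries everything doToQuery() needs. A minimal sketch of driving it from client code follows; the field, id, type, index and path values are illustrative assumptions, not part of this change:

import org.elasticsearch.common.geo.ShapeRelation;
import org.elasticsearch.common.geo.SpatialStrategy;
import org.elasticsearch.index.query.GeoShapeQueryBuilder;

public class GeoShapeQueryExample {
    public static void main(String[] args) {
        // Query a hypothetical geo_shape field "location" against a shape stored
        // in another document; fetch() resolves it at shard time via a GetRequest.
        GeoShapeQueryBuilder query = new GeoShapeQueryBuilder("location", "deu", "countries");
        query.indexedShapeIndex("shapes");      // index defaults no longer live in the parser
        query.indexedShapePath("territory");    // path of the shape inside the source document
        query.strategy(SpatialStrategy.RECURSIVE);
        query.relation(ShapeRelation.WITHIN);   // relation() rejects anything but INTERSECTS for TERM
    }
}

Setting the strategy before the relation keeps the guard in relation() meaningful, since it validates against whatever strategy is already configured.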

@ -19,59 +19,51 @@
package org.elasticsearch.index.query;
import org.apache.lucene.search.*;
import org.apache.lucene.spatial.prefix.PrefixTreeStrategy;
import org.apache.lucene.spatial.prefix.RecursivePrefixTreeStrategy;
import org.apache.lucene.spatial.query.SpatialArgs;
import org.apache.lucene.spatial.query.SpatialOperation;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.geo.ShapeRelation;
import org.elasticsearch.common.geo.builders.ShapeBuilder;
import org.elasticsearch.common.geo.SpatialStrategy;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.geo.GeoShapeFieldMapper;
import java.io.IOException;
public class GeoShapeQueryParser implements QueryParser<GeoShapeQueryBuilder> {
public static final ParseField SHAPE_FIELD = new ParseField("shape");
public static final ParseField STRATEGY_FIELD = new ParseField("strategy");
public static final ParseField RELATION_FIELD = new ParseField("relation");
public static final ParseField INDEXED_SHAPE_FIELD = new ParseField("indexed_shape");
public static final ParseField SHAPE_ID_FIELD = new ParseField("id");
public static final ParseField SHAPE_TYPE_FIELD = new ParseField("type");
public static final ParseField SHAPE_INDEX_FIELD = new ParseField("index");
public static final ParseField SHAPE_PATH_FIELD = new ParseField("path");
@Override
public String[] names() {
return new String[]{GeoShapeQueryBuilder.NAME, Strings.toCamelCase(GeoShapeQueryBuilder.NAME)};
}
@Override
public GeoShapeQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException {
XContentParser parser = parseContext.parser();
String fieldName = null;
ShapeRelation shapeRelation = null;
SpatialStrategy strategy = null;
BytesReference shape = null;
String id = null;
String type = null;
String index = null;
String shapePath = null;
XContentParser.Token token;
String currentFieldName = null;
float boost = AbstractQueryBuilder.DEFAULT_BOOST;
String queryName = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
@ -84,113 +76,78 @@ public class GeoShapeQueryParser implements QueryParser {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
token = parser.nextToken();
if (parseContext.parseFieldMatcher().match(currentFieldName, SHAPE_FIELD)) {
XContentBuilder builder = XContentFactory.contentBuilder(parser.contentType()).copyCurrentStructure(parser);
shape = builder.bytes();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, STRATEGY_FIELD)) {
String strategyName = parser.text();
strategy = SpatialStrategy.fromString(strategyName);
if (strategy == null) {
throw new ParsingException(parser.getTokenLocation(), "Unknown strategy [" + strategyName + " ]");
}
} else if (parseContext.parseFieldMatcher().match(currentFieldName, RELATION_FIELD)) {
shapeRelation = ShapeRelation.getRelationByName(parser.text());
if (shapeRelation == null) {
throw new ParsingException(parser.getTokenLocation(), "Unknown shape operation [" + parser.text() + " ]");
}
} else if (parseContext.parseFieldMatcher().match(currentFieldName, INDEXED_SHAPE_FIELD)) {
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token.isValue()) {
if (parseContext.parseFieldMatcher().match(currentFieldName, SHAPE_ID_FIELD)) {
id = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, SHAPE_TYPE_FIELD)) {
type = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, SHAPE_INDEX_FIELD)) {
index = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, SHAPE_PATH_FIELD)) {
shapePath = parser.text();
}
}
}
} else {
throw new ParsingException(parser.getTokenLocation(), "[geo_shape] query does not support [" + currentFieldName + "]");
}
}
}
} else if (token.isValue()) {
if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
boost = parser.floatValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
queryName = parser.text();
} else {
throw new ParsingException(parser.getTokenLocation(), "[geo_shape] query does not support [" + currentFieldName + "]");
}
}
}
GeoShapeQueryBuilder builder;
if (shape != null) {
builder = new GeoShapeQueryBuilder(fieldName, shape);
} else {
builder = new GeoShapeQueryBuilder(fieldName, id, type);
}
if (index != null) {
builder.indexedShapeIndex(index);
}
if (shapePath != null) {
builder.indexedShapePath(shapePath);
}
if (shapeRelation != null) {
builder.relation(shapeRelation);
}
if (strategy != null) {
builder.strategy(strategy);
}
if (queryName != null) {
builder.queryName(queryName);
}
builder.boost(boost);
return builder;
}
@Override
public GeoShapeQueryBuilder getBuilderPrototype() {
return GeoShapeQueryBuilder.PROTOTYPE;
}
}

@ -0,0 +1,89 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.query;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.util.CollectionUtils;
import java.io.IOException;
/**
* This enum is used to determine how to deal with invalid geo coordinates in geo related
* queries:
*
* On STRICT validation invalid coordinates cause an exception to be thrown.
* On IGNORE_MALFORMED invalid coordinates are being accepted.
* On COERCE invalid coordinates are being corrected to the most likely valid coordinate.
*/
public enum GeoValidationMethod implements Writeable<GeoValidationMethod> {
COERCE, IGNORE_MALFORMED, STRICT;
public static final GeoValidationMethod DEFAULT = STRICT;
public static final boolean DEFAULT_LENIENT_PARSING = (DEFAULT != STRICT);
private static final GeoValidationMethod PROTOTYPE = DEFAULT;
@Override
public GeoValidationMethod readFrom(StreamInput in) throws IOException {
return GeoValidationMethod.values()[in.readVInt()];
}
public static GeoValidationMethod readGeoValidationMethodFrom(StreamInput in) throws IOException {
return PROTOTYPE.readFrom(in);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeVInt(this.ordinal());
}
public static GeoValidationMethod fromString(String op) {
for (GeoValidationMethod method : GeoValidationMethod.values()) {
if (method.name().equalsIgnoreCase(op)) {
return method;
}
}
throw new IllegalArgumentException("validation method needs to be either " + CollectionUtils.arrayAsArrayList(GeoValidationMethod.values())
        + ", but not [" + op + "]");
}
/** Returns whether or not to skip bounding box validation. */
public static boolean isIgnoreMalformed(GeoValidationMethod method) {
return (method == GeoValidationMethod.IGNORE_MALFORMED || method == GeoValidationMethod.COERCE);
}
/** Returns whether or not to try and fix broken/wrapping bounding boxes. */
public static boolean isCoerce(GeoValidationMethod method) {
return method == GeoValidationMethod.COERCE;
}
/** Returns validation method corresponding to given coerce and ignoreMalformed values. */
public static GeoValidationMethod infer(boolean coerce, boolean ignoreMalformed) {
if (coerce) {
return GeoValidationMethod.COERCE;
} else if (ignoreMalformed) {
return GeoValidationMethod.IGNORE_MALFORMED;
} else {
return GeoValidationMethod.STRICT;
}
}
}
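Because the enum above owns both its string parsing and the coerce/ignore_malformed flag mapping, here is a short illustrative sketch of the expected round trips, using only methods defined above:

import org.elasticsearch.index.query.GeoValidationMethod;

public class GeoValidationMethodExample {
    public static void main(String[] args) {
        // infer() collapses the two legacy mapping flags into a single method.
        GeoValidationMethod method = GeoValidationMethod.infer(true, false);
        assert method == GeoValidationMethod.COERCE;

        // COERCE implies the IGNORE_MALFORMED behaviour as well.
        assert GeoValidationMethod.isIgnoreMalformed(method);
        assert GeoValidationMethod.isCoerce(method);

        // fromString() is case-insensitive and throws on unknown names.
        assert GeoValidationMethod.fromString("strict") == GeoValidationMethod.STRICT;
    }
}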

@ -23,11 +23,13 @@ import org.apache.lucene.search.Query;
import org.apache.lucene.util.XGeoHashUtils;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.GeoUtils;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
@ -38,6 +40,7 @@ import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
/**
* A geohash cell filter that filters {@link GeoPoint}s by their geohashes. Basically the a
@ -57,8 +60,9 @@ import java.util.List;
public class GeohashCellQuery {
public static final String NAME = "geohash_cell";
public static final ParseField NEIGHBORS_FIELD = new ParseField("neighbors");
public static final ParseField PRECISION_FIELD = new ParseField("precision");
public static final boolean DEFAULT_NEIGHBORS = false;
/**
* Create a new geohash filter for a given set of geohashes. In general this method
@ -70,7 +74,7 @@ public class GeohashCellQuery {
* @param geohashes optional array of additional geohashes
* @return a new geohash cell filter
*/
public static Query create(QueryShardContext context, GeoPointFieldMapper.GeoPointFieldType fieldType, String geohash, @Nullable List<CharSequence> geohashes) {
MappedFieldType geoHashMapper = fieldType.geohashFieldType();
if (geoHashMapper == null) {
throw new IllegalArgumentException("geohash filter needs geohash_prefix to be enabled");
@ -89,23 +93,20 @@ public class GeohashCellQuery {
* <code>geohash</code> to be set. The default for neighbor filtering is
* <code>false</code>.
*/
public static class Builder extends AbstractQueryBuilder<Builder> {
// we need to store the geohash rather than the corresponding point,
// because a transformation from a geohash to a point and back to the
// geohash will extend the accuracy of the hash to max precision
// i.e. by filling it up with z's.
private String fieldName;
private String geohash;
private Integer levels = null;
private boolean neighbors = DEFAULT_NEIGHBORS;
private static final Builder PROTOTYPE = new Builder("field", new GeoPoint());
public Builder(String field) {
this(field, null, false);
}
public Builder(String field, GeoPoint point) {
this(field, point == null ? null : point.geohash(), false);
}
public Builder(String field, String geohash) {
@ -113,8 +114,13 @@ public class GeohashCellQuery {
}
public Builder(String field, String geohash, boolean neighbors) {
super();
if (Strings.isEmpty(field)) {
throw new IllegalArgumentException("fieldName must not be null");
}
if (Strings.isEmpty(geohash)) {
throw new IllegalArgumentException("geohash or point must be defined");
}
this.fieldName = field;
this.geohash = geohash;
this.neighbors = neighbors;
}
@ -134,11 +140,22 @@ public class GeohashCellQuery {
return this;
}
public String geohash() {
return geohash;
}
public Builder precision(int levels) {
if (levels <= 0) {
throw new IllegalArgumentException("precision must be greater than 0. Found [" + levels + "]");
}
this.levels = levels;
return this;
}
public Integer precision() {
return levels;
}
public Builder precision(String precision) {
double meters = DistanceUnit.parse(precision, DistanceUnit.DEFAULT, DistanceUnit.METERS);
return precision(GeoUtils.geoHashLevelsForPrecision(meters));
@ -149,27 +166,107 @@ public class GeohashCellQuery {
return this;
}
public boolean neighbors() {
return neighbors;
}
public Builder fieldName(String fieldName) {
this.fieldName = fieldName;
return this;
}
public String fieldName() {
return fieldName;
}
@Override
protected Query doToQuery(QueryShardContext context) throws IOException {
MappedFieldType fieldType = context.fieldMapper(fieldName);
if (fieldType == null) {
throw new QueryShardException(context, "failed to parse [{}] query. missing [{}] field [{}]", NAME,
GeoPointFieldMapper.CONTENT_TYPE, fieldName);
}
if (!(fieldType instanceof GeoPointFieldMapper.GeoPointFieldType)) {
throw new QueryShardException(context, "failed to parse [{}] query. field [{}] is not a geo_point field", NAME, fieldName);
}
GeoPointFieldMapper.GeoPointFieldType geoFieldType = ((GeoPointFieldMapper.GeoPointFieldType) fieldType);
if (!geoFieldType.isGeohashPrefixEnabled()) {
throw new QueryShardException(context, "failed to parse [{}] query. [geohash_prefix] is not enabled for field [{}]", NAME,
fieldName);
}
if (levels != null) {
int len = Math.min(levels, geohash.length());
geohash = geohash.substring(0, len);
}
Query query;
if (neighbors) {
query = create(context, geoFieldType, geohash, XGeoHashUtils.addNeighbors(geohash, new ArrayList<CharSequence>(8)));
} else {
query = create(context, geoFieldType, geohash, null);
}
return query;
}
@Override
protected void doXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(NAME);
builder.field(NEIGHBORS_FIELD.getPreferredName(), neighbors);
if (levels != null) {
    builder.field(PRECISION_FIELD.getPreferredName(), levels);
}
builder.field(fieldName, geohash);
printBoostAndQueryName(builder);
builder.endObject();
}
@Override
protected Builder doReadFrom(StreamInput in) throws IOException {
String field = in.readString();
String geohash = in.readString();
Builder builder = new Builder(field, geohash);
if (in.readBoolean()) {
builder.precision(in.readVInt());
}
builder.neighbors(in.readBoolean());
return builder;
}
@Override
protected void doWriteTo(StreamOutput out) throws IOException {
out.writeString(fieldName);
out.writeString(geohash);
boolean hasLevels = levels != null;
out.writeBoolean(hasLevels);
if (hasLevels) {
out.writeVInt(levels);
}
out.writeBoolean(neighbors);
}
@Override
protected boolean doEquals(Builder other) {
return Objects.equals(fieldName, other.fieldName)
&& Objects.equals(geohash, other.geohash)
&& Objects.equals(levels, other.levels)
&& Objects.equals(neighbors, other.neighbors);
}
@Override
protected int doHashCode() {
return Objects.hash(fieldName, geohash, levels, neighbors);
}
@Override
public String getWriteableName() {
return NAME;
}
}
public static class Parser implements QueryParser<Builder> {
@Inject
public Parser() {
@ -181,14 +278,15 @@ public class GeohashCellQuery {
}
@Override
public Builder fromXContent(QueryParseContext parseContext) throws IOException {
XContentParser parser = parseContext.parser();
String fieldName = null;
String geohash = null;
Integer levels = null;
Boolean neighbors = null;
String queryName = null;
Float boost = null;
XContentParser.Token token;
if ((token = parser.currentToken()) != Token.START_OBJECT) {
@ -201,24 +299,31 @@ public class GeohashCellQuery {
if (parseContext.isDeprecatedSetting(field)) {
// skip
} else if (parseContext.parseFieldMatcher().match(field, PRECISION_FIELD)) {
token = parser.nextToken();
if (token == Token.VALUE_NUMBER) {
levels = parser.intValue();
} else if (token == Token.VALUE_STRING) {
double meters = DistanceUnit.parse(parser.text(), DistanceUnit.DEFAULT, DistanceUnit.METERS);
levels = GeoUtils.geoHashLevelsForPrecision(meters);
}
} else if (parseContext.parseFieldMatcher().match(field, NEIGHBORS_FIELD)) {
parser.nextToken();
neighbors = parser.booleanValue();
} else if (parseContext.parseFieldMatcher().match(field, AbstractQueryBuilder.NAME_FIELD)) {
parser.nextToken();
queryName = parser.text();
} else if (parseContext.parseFieldMatcher().match(field, AbstractQueryBuilder.BOOST_FIELD)) {
parser.nextToken();
boost = parser.floatValue();
} else {
fieldName = field;
token = parser.nextToken();
if (token == Token.VALUE_STRING) {
// A string indicates either a geohash or a lat/lon
// string
String location = parser.text();
if (location.indexOf(",") > 0) {
geohash = GeoUtils.parseGeoPoint(parser).geohash();
} else {
geohash = location;
@ -231,38 +336,25 @@ public class GeohashCellQuery {
throw new ElasticsearchParseException("failed to parse [{}] query. unexpected token [{}]", NAME, token);
}
}
Builder builder = new Builder(fieldName, geohash);
if (levels != null) {
    builder.precision(levels);
}
if (neighbors != null) {
    builder.neighbors(neighbors);
}
if (queryName != null) {
    builder.queryName(queryName);
}
if (boost != null) {
    builder.boost(boost);
}
return builder;
}
@Override
public GeohashCellQuery.Builder getBuilderPrototype() {
return Builder.PROTOTYPE;
}
}
}
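For reference, a minimal sketch of the new builder API from the caller's side; the field name and point are illustrative, and the target field must be mapped as geo_point with geohash_prefix enabled or doToQuery() throws:

import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.index.query.GeohashCellQuery;

public class GeohashCellExample {
    public static void main(String[] args) {
        // The point is reduced to its geohash immediately; only the hash is kept.
        GeohashCellQuery.Builder builder =
                new GeohashCellQuery.Builder("pin", new GeoPoint(51.5, -0.12));
        builder.precision("2km");   // distance is translated into geohash levels
        builder.neighbors(true);    // also match the eight surrounding cells
    }
}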

@ -18,48 +18,92 @@
*/
package org.elasticsearch.index.query;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.MultiDocValues;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.join.JoinUtil;
import org.apache.lucene.search.join.ScoreMode;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.IndexParentChildFieldData;
import org.elasticsearch.index.fielddata.plain.ParentChildIndexFieldData;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
import org.elasticsearch.index.query.support.QueryInnerHits;
import org.elasticsearch.search.fetch.innerhits.InnerHitsContext;
import org.elasticsearch.search.fetch.innerhits.InnerHitsSubSearchContext;
import java.io.IOException;
import java.util.Locale;
import java.util.Objects;
/**
 * A query builder for <tt>has_child</tt> queries.
 */
public class HasChildQueryBuilder extends AbstractQueryBuilder<HasChildQueryBuilder> {

    /**
     * The query's name
     */
    public static final String NAME = "has_child";
/**
* The default maximum number of children that are required to match for the parent to be considered a match.
*/
public static final int DEFAULT_MAX_CHILDREN = Integer.MAX_VALUE;
/**
* The default minimum number of children that are required to match for the parent to be considered a match.
*/
public static final int DEFAULT_MIN_CHILDREN = 0;
/*
* The default score mode that is used to combine score coming from multiple parent documents.
*/
public static final ScoreMode DEFAULT_SCORE_MODE = ScoreMode.None;
private final QueryBuilder query;
private final String type;
private ScoreMode scoreMode = DEFAULT_SCORE_MODE;
private int minChildren = DEFAULT_MIN_CHILDREN;
private int maxChildren = DEFAULT_MAX_CHILDREN;
private QueryInnerHits queryInnerHits;
static final HasChildQueryBuilder PROTOTYPE = new HasChildQueryBuilder("", EmptyQueryBuilder.PROTOTYPE);
public HasChildQueryBuilder(String type, QueryBuilder query, int maxChildren, int minChildren, ScoreMode scoreMode, QueryInnerHits queryInnerHits) {
this(type, query);
scoreMode(scoreMode);
this.maxChildren = maxChildren;
this.minChildren = minChildren;
this.queryInnerHits = queryInnerHits;
}
public HasChildQueryBuilder(String type, QueryBuilder query) {
if (type == null) {
throw new IllegalArgumentException("[" + NAME + "] requires 'type' field");
}
if (query == null) {
throw new IllegalArgumentException("[" + NAME + "] requires 'query' field");
}
this.type = type;
this.query = query;
}
/**
* Defines how the scores from the matching child documents are mapped into the parent document.
*/
public HasChildQueryBuilder scoreMode(ScoreMode scoreMode) {
if (scoreMode == null) {
throw new IllegalArgumentException("[" + NAME + "] requires 'score_mode' field");
}
this.scoreMode = scoreMode;
return this;
}
@ -68,6 +112,9 @@ public class HasChildQueryBuilder extends QueryBuilder implements BoostableQuery
* Defines the minimum number of children that are required to match for the parent to be considered a match.
*/
public HasChildQueryBuilder minChildren(int minChildren) {
if (minChildren < 0) {
throw new IllegalArgumentException("[" + NAME + "] requires non-negative 'min_children' field");
}
this.minChildren = minChildren;
return this;
}
@ -76,6 +123,9 @@ public class HasChildQueryBuilder extends QueryBuilder implements BoostableQuery
* Defines the maximum number of children that are required to match for the parent to be considered a match.
*/
public HasChildQueryBuilder maxChildren(int maxChildren) {
if (maxChildren < 0) {
throw new IllegalArgumentException("[" + NAME + "] requires non-negative 'max_children' field");
}
this.maxChildren = maxChildren;
return this;
}
@ -83,45 +133,252 @@ public class HasChildQueryBuilder extends QueryBuilder implements BoostableQuery
/**
 * Sets inner hit definition in the scope of this query and reusing the defined type and query.
 */
public HasChildQueryBuilder innerHit(QueryInnerHits queryInnerHits) {
    this.queryInnerHits = queryInnerHits;
    return this;
}

/**
 * Returns inner hit definition in the scope of this query and reusing the defined type and query.
 */
public QueryInnerHits innerHit() {
    return queryInnerHits;
}
/**
* Returns the children query to execute.
*/
public QueryBuilder query() {
return query;
}
/**
* Returns the child type
*/
public String childType() {
return type;
}
/**
* Returns how the scores from the matching child documents are mapped into the parent document.
*/
public ScoreMode scoreMode() {
return scoreMode;
}
/**
* Returns the minimum number of children that are required to match for the parent to be considered a match.
* The default is {@value #DEFAULT_MIN_CHILDREN}
*/
public int minChildren() {
return minChildren;
}
/**
* Returns the maximum number of children that are required to match for the parent to be considered a match.
* The default is {@value #DEFAULT_MAX_CHILDREN}
*/
public int maxChildren() { return maxChildren; }
@Override
protected void doXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(NAME);
builder.field("query");
query.toXContent(builder, params);
builder.field("child_type", type);
builder.field("score_mode", scoreMode.name().toLowerCase(Locale.ROOT));
builder.field("min_children", minChildren);
builder.field("max_children", maxChildren);
printBoostAndQueryName(builder);
if (queryInnerHits != null) {
queryInnerHits.toXContent(builder, params);
}
builder.endObject();
}
@Override
public String getWriteableName() {
return NAME;
}
@Override
protected Query doToQuery(QueryShardContext context) throws IOException {
Query innerQuery = query.toQuery(context);
if (innerQuery == null) {
return null;
}
innerQuery.setBoost(boost);
DocumentMapper childDocMapper = context.mapperService().documentMapper(type);
if (childDocMapper == null) {
throw new QueryShardException(context, "[" + NAME + "] no mapping found for type [" + type + "]");
}
ParentFieldMapper parentFieldMapper = childDocMapper.parentFieldMapper();
if (parentFieldMapper.active() == false) {
throw new QueryShardException(context, "[" + NAME + "] _parent field has no parent type configured");
}
if (queryInnerHits != null) {
try (XContentParser parser = queryInnerHits.getXcontentParser()) {
XContentParser.Token token = parser.nextToken();
if (token != XContentParser.Token.START_OBJECT) {
throw new IllegalStateException("start object expected but was: [" + token + "]");
}
InnerHitsSubSearchContext innerHits = context.indexQueryParserService().getInnerHitsQueryParserHelper().parse(parser);
if (innerHits != null) {
ParsedQuery parsedQuery = new ParsedQuery(innerQuery, context.copyNamedQueries());
InnerHitsContext.ParentChildInnerHits parentChildInnerHits = new InnerHitsContext.ParentChildInnerHits(innerHits.getSubSearchContext(), parsedQuery, null, context.mapperService(), childDocMapper);
String name = innerHits.getName() != null ? innerHits.getName() : type;
context.addInnerHits(name, parentChildInnerHits);
}
}
}
String parentType = parentFieldMapper.type();
DocumentMapper parentDocMapper = context.mapperService().documentMapper(parentType);
if (parentDocMapper == null) {
throw new QueryShardException(context, "[" + NAME + "] Type [" + type + "] points to a non existent parent type ["
+ parentType + "]");
}
if (maxChildren > 0 && maxChildren < minChildren) {
throw new QueryShardException(context, "[" + NAME + "] 'max_children' is less than 'min_children'");
}
// wrap the query with type query
innerQuery = Queries.filtered(innerQuery, childDocMapper.typeFilter());
final ParentChildIndexFieldData parentChildIndexFieldData = context.getForField(parentFieldMapper.fieldType());
int maxChildren = maxChildren();
// 0 in pre 2.x p/c impl means unbounded
if (maxChildren == 0) {
maxChildren = Integer.MAX_VALUE;
}
return new LateParsingQuery(parentDocMapper.typeFilter(), innerQuery, minChildren(), maxChildren, parentType, scoreMode, parentChildIndexFieldData);
}
final static class LateParsingQuery extends Query {
private final Query toQuery;
private final Query innerQuery;
private final int minChildren;
private final int maxChildren;
private final String parentType;
private final ScoreMode scoreMode;
private final ParentChildIndexFieldData parentChildIndexFieldData;
LateParsingQuery(Query toQuery, Query innerQuery, int minChildren, int maxChildren, String parentType, ScoreMode scoreMode, ParentChildIndexFieldData parentChildIndexFieldData) {
this.toQuery = toQuery;
this.innerQuery = innerQuery;
this.minChildren = minChildren;
this.maxChildren = maxChildren;
this.parentType = parentType;
this.scoreMode = scoreMode;
this.parentChildIndexFieldData = parentChildIndexFieldData;
}
@Override
public Query rewrite(IndexReader reader) throws IOException {
if (getBoost() != 1.0F) {
return super.rewrite(reader);
}
String joinField = ParentFieldMapper.joinField(parentType);
IndexSearcher indexSearcher = new IndexSearcher(reader);
indexSearcher.setQueryCache(null);
IndexParentChildFieldData indexParentChildFieldData = parentChildIndexFieldData.loadGlobal(indexSearcher.getIndexReader());
MultiDocValues.OrdinalMap ordinalMap = ParentChildIndexFieldData.getOrdinalMap(indexParentChildFieldData, parentType);
return JoinUtil.createJoinQuery(joinField, innerQuery, toQuery, indexSearcher, scoreMode, ordinalMap, minChildren, maxChildren);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
if (!super.equals(o)) return false;
LateParsingQuery that = (LateParsingQuery) o;
if (minChildren != that.minChildren) return false;
if (maxChildren != that.maxChildren) return false;
if (!toQuery.equals(that.toQuery)) return false;
if (!innerQuery.equals(that.innerQuery)) return false;
if (!parentType.equals(that.parentType)) return false;
return scoreMode == that.scoreMode;
}
@Override
public int hashCode() {
int result = super.hashCode();
result = 31 * result + toQuery.hashCode();
result = 31 * result + innerQuery.hashCode();
result = 31 * result + minChildren;
result = 31 * result + maxChildren;
result = 31 * result + parentType.hashCode();
result = 31 * result + scoreMode.hashCode();
return result;
}
@Override
public String toString(String s) {
return "LateParsingQuery {parentType=" + parentType + "}";
}
public int getMinChildren() {
return minChildren;
}
public int getMaxChildren() {
return maxChildren;
}
public ScoreMode getScoreMode() {
return scoreMode;
}
}
@Override
protected boolean doEquals(HasChildQueryBuilder that) {
return Objects.equals(query, that.query)
&& Objects.equals(type, that.type)
&& Objects.equals(scoreMode, that.scoreMode)
&& Objects.equals(minChildren, that.minChildren)
&& Objects.equals(maxChildren, that.maxChildren)
&& Objects.equals(queryInnerHits, that.queryInnerHits);
}
@Override
protected int doHashCode() {
return Objects.hash(query, type, scoreMode, minChildren, maxChildren, queryInnerHits);
}
protected HasChildQueryBuilder(StreamInput in) throws IOException {
type = in.readString();
minChildren = in.readInt();
maxChildren = in.readInt();
final int ordinal = in.readVInt();
scoreMode = ScoreMode.values()[ordinal];
query = in.readQuery();
if (in.readBoolean()) {
queryInnerHits = new QueryInnerHits(in);
}
}
@Override
protected HasChildQueryBuilder doReadFrom(StreamInput in) throws IOException {
return new HasChildQueryBuilder(in);
}
@Override
protected void doWriteTo(StreamOutput out) throws IOException {
out.writeString(type);
out.writeInt(minChildren());
out.writeInt(maxChildren());
out.writeVInt(scoreMode.ordinal());
out.writeQuery(query);
if (queryInnerHits != null) {
out.writeBoolean(true);
queryInnerHits.writeTo(out);
} else {
out.writeBoolean(false);
}
}
}
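With scoring, child-count bounds and inner hits all living on the builder now, a minimal usage sketch follows; the type, field and values are illustrative assumptions:

import org.apache.lucene.search.join.ScoreMode;
import org.elasticsearch.index.query.HasChildQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;

public class HasChildExample {
    public static void main(String[] args) {
        HasChildQueryBuilder query =
                new HasChildQueryBuilder("comment", QueryBuilders.termQuery("status", "approved"));
        query.scoreMode(ScoreMode.Max);  // typed enum instead of a free-form string
        query.minChildren(2);            // parent needs at least two matching children
        query.maxChildren(10);           // and at most ten, checked in doToQuery()
    }
}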

@ -19,82 +19,52 @@
package org.elasticsearch.index.query;
import org.apache.lucene.search.join.ScoreMode;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.support.QueryInnerHits;
import java.io.IOException;
/**
*
* A query parser for <tt>has_child</tt> queries.
*/
public class HasChildQueryParser implements QueryParser<HasChildQueryBuilder> {
private static final ParseField QUERY_FIELD = new ParseField("query", "filter");
@Override
public String[] names() {
return new String[] { HasChildQueryBuilder.NAME, Strings.toCamelCase(HasChildQueryBuilder.NAME) };
}
@Override
public HasChildQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException {
XContentParser parser = parseContext.parser();
float boost = AbstractQueryBuilder.DEFAULT_BOOST;
String childType = null;
ScoreMode scoreMode = HasChildQueryBuilder.DEFAULT_SCORE_MODE;
int minChildren = HasChildQueryBuilder.DEFAULT_MIN_CHILDREN;
int maxChildren = HasChildQueryBuilder.DEFAULT_MAX_CHILDREN;
String queryName = null;
QueryInnerHits queryInnerHits = null;
String currentFieldName = null;
XContentParser.Token token;
QueryBuilder iqb = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (parseContext.isDeprecatedSetting(currentFieldName)) {
// skip
} else if (token == XContentParser.Token.START_OBJECT) {
if (parseContext.parseFieldMatcher().match(currentFieldName, QUERY_FIELD)) {
iqb = parseContext.parseInnerQueryBuilder();
} else if ("inner_hits".equals(currentFieldName)) {
queryInnerHits = new QueryInnerHits(parser);
} else {
throw new ParsingException(parser.getTokenLocation(), "[has_child] query does not support [" + currentFieldName + "]");
}
} else if (token.isValue()) {
if ("type".equals(currentFieldName) || "child_type".equals(currentFieldName) || "childType".equals(currentFieldName)) {
@ -110,66 +80,14 @@ public class HasChildQueryParser implements QueryParser {
} else if ("_name".equals(currentFieldName)) {
queryName = parser.text();
} else {
throw new ParsingException(parser.getTokenLocation(), "[has_child] query does not support [" + currentFieldName + "]");
}
}
}
HasChildQueryBuilder hasChildQueryBuilder = new HasChildQueryBuilder(childType, iqb, maxChildren, minChildren, scoreMode, queryInnerHits);
hasChildQueryBuilder.queryName(queryName);
hasChildQueryBuilder.boost(boost);
return hasChildQueryBuilder;
}
public static ScoreMode parseScoreMode(String scoreModeString) {
@ -187,64 +105,8 @@ public class HasChildQueryParser implements QueryParser {
throw new IllegalArgumentException("No score mode for child query [" + scoreModeString + "] found");
}
@Override
public HasChildQueryBuilder getBuilderPrototype() {
return HasChildQueryBuilder.PROTOTYPE;
}
}

@ -18,83 +18,234 @@
*/
package org.elasticsearch.index.query;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.join.ScoreMode;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.plain.ParentChildIndexFieldData;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
import org.elasticsearch.index.query.support.QueryInnerHits;
import org.elasticsearch.search.fetch.innerhits.InnerHitsContext;
import org.elasticsearch.search.fetch.innerhits.InnerHitsSubSearchContext;
import java.io.IOException;
import java.util.HashSet;
import java.util.Objects;
import java.util.Set;
/**
* Builder for the 'has_parent' query.
*/
public class HasParentQueryBuilder extends AbstractQueryBuilder<HasParentQueryBuilder> {
public static final String NAME = "has_parent";
public static final boolean DEFAULT_SCORE = false;
private final QueryBuilder query;
private final String type;
private boolean score = DEFAULT_SCORE;
private QueryInnerHits innerHit;
/**
* @param type The parent type
* @param query The query that will be matched with parent documents
*/
public HasParentQueryBuilder(String type, QueryBuilder query) {
if (type == null) {
throw new IllegalArgumentException("[" + NAME + "] requires 'parent_type' field");
}
if (query == null) {
throw new IllegalArgumentException("[" + NAME + "] requires 'query' field");
}
this.type = type;
this.query = query;
}
public HasParentQueryBuilder(String type, QueryBuilder query, boolean score, QueryInnerHits innerHits) {
this(type, query);
this.score = score;
this.innerHit = innerHits;
}
/**
 * Defines if the parent score is mapped into the child documents.
 */
public HasParentQueryBuilder score(boolean score) {
    this.score = score;
    return this;
}
/**
* Sets inner hit definition in the scope of this query and reusing the defined type and query.
*/
public HasParentQueryBuilder innerHit(QueryInnerHits innerHit) {
this.innerHit = innerHit;
return this;
}
/**
* Returns the query to execute.
*/
public QueryBuilder query() {
return query;
}
/**
* Returns <code>true</code> if the parent score is mapped into the child documents
*/
public boolean score() {
return score;
}
/**
* Returns the parents type name
*/
public String type() {
return type;
}
/**
* Returns inner hit definition in the scope of this query and reusing the defined type and query.
*/
public QueryInnerHits innerHit() {
return innerHit;
}
@Override
protected Query doToQuery(QueryShardContext context) throws IOException {
Query innerQuery = query.toQuery(context);
if (innerQuery == null) {
return null;
}
innerQuery.setBoost(boost);
DocumentMapper parentDocMapper = context.mapperService().documentMapper(type);
if (parentDocMapper == null) {
throw new QueryShardException(context, "[has_parent] query configured 'parent_type' [" + type
+ "] is not a valid type");
}
if (innerHit != null) {
try (XContentParser parser = innerHit.getXcontentParser()) {
XContentParser.Token token = parser.nextToken();
if (token != XContentParser.Token.START_OBJECT) {
throw new IllegalStateException("start object expected but was: [" + token + "]");
}
InnerHitsSubSearchContext innerHits = context.indexQueryParserService().getInnerHitsQueryParserHelper().parse(parser);
if (innerHits != null) {
ParsedQuery parsedQuery = new ParsedQuery(innerQuery, context.copyNamedQueries());
InnerHitsContext.ParentChildInnerHits parentChildInnerHits = new InnerHitsContext.ParentChildInnerHits(innerHits.getSubSearchContext(), parsedQuery, null, context.mapperService(), parentDocMapper);
String name = innerHits.getName() != null ? innerHits.getName() : type;
context.addInnerHits(name, parentChildInnerHits);
}
}
}
Set<String> parentTypes = new HashSet<>(5);
parentTypes.add(parentDocMapper.type());
ParentChildIndexFieldData parentChildIndexFieldData = null;
for (DocumentMapper documentMapper : context.mapperService().docMappers(false)) {
ParentFieldMapper parentFieldMapper = documentMapper.parentFieldMapper();
if (parentFieldMapper.active()) {
DocumentMapper parentTypeDocumentMapper = context.mapperService().documentMapper(parentFieldMapper.type());
parentChildIndexFieldData = context.getForField(parentFieldMapper.fieldType());
if (parentTypeDocumentMapper == null) {
// Only add this if this parentFieldMapper (also a parent) isn't a child of another parent.
parentTypes.add(parentFieldMapper.type());
}
}
}
if (parentChildIndexFieldData == null) {
throw new QueryShardException(context, "[has_parent] no _parent field configured");
}
Query parentTypeQuery = null;
if (parentTypes.size() == 1) {
DocumentMapper documentMapper = context.mapperService().documentMapper(parentTypes.iterator().next());
if (documentMapper != null) {
parentTypeQuery = documentMapper.typeFilter();
}
} else {
BooleanQuery.Builder parentsFilter = new BooleanQuery.Builder();
for (String parentTypeStr : parentTypes) {
DocumentMapper documentMapper = context.mapperService().documentMapper(parentTypeStr);
if (documentMapper != null) {
parentsFilter.add(documentMapper.typeFilter(), BooleanClause.Occur.SHOULD);
}
}
parentTypeQuery = parentsFilter.build();
}
if (parentTypeQuery == null) {
return null;
}
// wrap the query with type query
innerQuery = Queries.filtered(innerQuery, parentDocMapper.typeFilter());
Query childrenFilter = Queries.not(parentTypeQuery);
return new HasChildQueryBuilder.LateParsingQuery(childrenFilter, innerQuery, HasChildQueryBuilder.DEFAULT_MIN_CHILDREN, HasChildQueryBuilder.DEFAULT_MAX_CHILDREN, type, score ? ScoreMode.Max : ScoreMode.None, parentChildIndexFieldData);
}
@Override
protected void doXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(HasParentQueryParser.NAME);
builder.startObject(NAME);
builder.field("query");
queryBuilder.toXContent(builder, params);
builder.field("parent_type", parentType);
if (scoreMode != null) {
builder.field("score_mode", scoreMode);
}
if (boost != 1.0f) {
builder.field("boost", boost);
}
if (queryName != null) {
builder.field("_name", queryName);
}
query.toXContent(builder, params);
builder.field("parent_type", type);
builder.field("score", score);
printBoostAndQueryName(builder);
if (innerHit != null) {
builder.startObject("inner_hits");
builder.value(innerHit);
builder.endObject();
innerHit.toXContent(builder, params);
}
builder.endObject();
}
}
@Override
public String getWriteableName() {
return NAME;
}
protected HasParentQueryBuilder(StreamInput in) throws IOException {
type = in.readString();
score = in.readBoolean();
query = in.readQuery();
if (in.readBoolean()) {
innerHit = new QueryInnerHits(in);
}
}
@Override
protected HasParentQueryBuilder doReadFrom(StreamInput in) throws IOException {
return new HasParentQueryBuilder(in);
}
@Override
protected void doWriteTo(StreamOutput out) throws IOException {
out.writeString(type);
out.writeBoolean(score);
out.writeQuery(query);
if (innerHit != null) {
out.writeBoolean(true);
innerHit.writeTo(out);
} else {
out.writeBoolean(false);
}
}
@Override
protected boolean doEquals(HasParentQueryBuilder that) {
return Objects.equals(query, that.query)
&& Objects.equals(type, that.type)
&& Objects.equals(score, that.score)
&& Objects.equals(innerHit, that.innerHit);
}
@Override
protected int doHashCode() {
return Objects.hash(query, type, score, innerHit);
}
}
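To make the refactored builder API above concrete, here is a minimal usage sketch. It is illustrative only: the type and field names ("blog", "owner") are made up, and the no-arg QueryInnerHits constructor is an assumption, not something shown in this diff.

import org.elasticsearch.index.query.HasParentQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.support.QueryInnerHits;

public class HasParentUsageSketch {
    public static void main(String[] args) {
        // Match child documents whose parent of (hypothetical) type "blog" matches
        // a term query, mapping the parent score into the children and returning
        // the matching parents as inner hits.
        HasParentQueryBuilder query =
                new HasParentQueryBuilder("blog", QueryBuilders.termQuery("owner", "kimchy"))
                        .score(true)
                        .innerHit(new QueryInnerHits()); // assumed no-arg constructor
        System.out.println(query); // prints the query as JSON via toXContent
    }
}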


@ -18,178 +18,79 @@
*/
package org.elasticsearch.index.query;
import org.apache.lucene.search.*;
import org.apache.lucene.search.join.ScoreMode;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.plain.ParentChildIndexFieldData;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
import org.elasticsearch.index.query.support.InnerHitsQueryParserHelper;
import org.elasticsearch.index.query.support.XContentStructure;
import org.elasticsearch.search.fetch.innerhits.InnerHitsContext;
import org.elasticsearch.search.fetch.innerhits.InnerHitsSubSearchContext;
import org.elasticsearch.index.query.support.QueryInnerHits;
import java.io.IOException;
import java.util.HashSet;
import java.util.Set;
import static org.elasticsearch.index.query.HasChildQueryParser.joinUtilHelper;
public class HasParentQueryParser implements QueryParser<HasParentQueryBuilder> {
public class HasParentQueryParser implements QueryParser {
public static final String NAME = "has_parent";
private static final HasParentQueryBuilder PROTOTYPE = new HasParentQueryBuilder("", EmptyQueryBuilder.PROTOTYPE);
private static final ParseField QUERY_FIELD = new ParseField("query", "filter");
private final InnerHitsQueryParserHelper innerHitsQueryParserHelper;
@Inject
public HasParentQueryParser(InnerHitsQueryParserHelper innerHitsQueryParserHelper) {
this.innerHitsQueryParserHelper = innerHitsQueryParserHelper;
}
private static final ParseField SCORE_FIELD = new ParseField("score_mode").withAllDeprecated("score");
private static final ParseField TYPE_FIELD = new ParseField("parent_type", "type");
@Override
public String[] names() {
return new String[]{NAME, Strings.toCamelCase(NAME)};
return new String[]{HasParentQueryBuilder.NAME, Strings.toCamelCase(HasParentQueryBuilder.NAME)};
}
@Override
public Query parse(QueryParseContext parseContext) throws IOException, ParsingException {
public HasParentQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException {
XContentParser parser = parseContext.parser();
boolean queryFound = false;
float boost = 1.0f;
float boost = AbstractQueryBuilder.DEFAULT_BOOST;
String parentType = null;
boolean score = false;
boolean score = HasParentQueryBuilder.DEFAULT_SCORE;
String queryName = null;
InnerHitsSubSearchContext innerHits = null;
QueryInnerHits innerHits = null;
String currentFieldName = null;
XContentParser.Token token;
XContentStructure.InnerQuery iq = null;
QueryBuilder iqb = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_OBJECT) {
// Usually, the query would be parsed here, but the child
// type may not have been extracted yet, so use the
// XContentStructure.<type> facade to parse if available,
// or delay parsing if not.
if (parseContext.parseFieldMatcher().match(currentFieldName, QUERY_FIELD)) {
iq = new XContentStructure.InnerQuery(parseContext, parentType == null ? null : new String[] {parentType});
queryFound = true;
iqb = parseContext.parseInnerQueryBuilder();
} else if ("inner_hits".equals(currentFieldName)) {
innerHits = innerHitsQueryParserHelper.parse(parseContext);
innerHits = new QueryInnerHits(parser);
} else {
throw new ParsingException(parseContext, "[has_parent] query does not support [" + currentFieldName + "]");
throw new ParsingException(parser.getTokenLocation(), "[has_parent] query does not support [" + currentFieldName + "]");
}
} else if (token.isValue()) {
if ("type".equals(currentFieldName) || "parent_type".equals(currentFieldName) || "parentType".equals(currentFieldName)) {
if (parseContext.parseFieldMatcher().match(currentFieldName, TYPE_FIELD)) {
parentType = parser.text();
} else if ("score_mode".equals(currentFieldName) || "scoreMode".equals(currentFieldName)) {
} else if (parseContext.parseFieldMatcher().match(currentFieldName, SCORE_FIELD)) {
String scoreModeValue = parser.text();
if ("score".equals(scoreModeValue)) {
score = true;
} else if ("none".equals(scoreModeValue)) {
score = false;
} else {
throw new ParsingException(parser.getTokenLocation(), "[has_parent] query does not support [" + scoreModeValue + "] as an option for score_mode");
}
} else if ("score".equals(currentFieldName)) {
score = parser.booleanValue();
} else if ("boost".equals(currentFieldName)) {
boost = parser.floatValue();
} else if ("_name".equals(currentFieldName)) {
queryName = parser.text();
} else {
throw new ParsingException(parseContext, "[has_parent] query does not support [" + currentFieldName + "]");
throw new ParsingException(parser.getTokenLocation(), "[has_parent] query does not support [" + currentFieldName + "]");
}
}
}
if (!queryFound) {
throw new ParsingException(parseContext, "[has_parent] query requires 'query' field");
}
if (parentType == null) {
throw new ParsingException(parseContext, "[has_parent] query requires 'parent_type' field");
}
Query innerQuery = iq.asQuery(parentType);
if (innerQuery == null) {
return null;
}
innerQuery.setBoost(boost);
Query query = createParentQuery(innerQuery, parentType, score, parseContext, innerHits);
if (query == null) {
return null;
}
query.setBoost(boost);
if (queryName != null) {
parseContext.addNamedQuery(queryName, query);
}
return query;
return new HasParentQueryBuilder(parentType, iqb, score, innerHits).queryName(queryName).boost(boost);
}
static Query createParentQuery(Query innerQuery, String parentType, boolean score, QueryParseContext parseContext, InnerHitsSubSearchContext innerHits) throws IOException {
DocumentMapper parentDocMapper = parseContext.mapperService().documentMapper(parentType);
if (parentDocMapper == null) {
throw new ParsingException(parseContext, "[has_parent] query configured 'parent_type' [" + parentType
+ "] is not a valid type");
}
if (innerHits != null) {
ParsedQuery parsedQuery = new ParsedQuery(innerQuery, parseContext.copyNamedQueries());
InnerHitsContext.ParentChildInnerHits parentChildInnerHits = new InnerHitsContext.ParentChildInnerHits(innerHits.getSubSearchContext(), parsedQuery, null, parseContext.mapperService(), parentDocMapper);
String name = innerHits.getName() != null ? innerHits.getName() : parentType;
parseContext.addInnerHits(name, parentChildInnerHits);
}
Set<String> parentTypes = new HashSet<>(5);
parentTypes.add(parentDocMapper.type());
ParentChildIndexFieldData parentChildIndexFieldData = null;
for (DocumentMapper documentMapper : parseContext.mapperService().docMappers(false)) {
ParentFieldMapper parentFieldMapper = documentMapper.parentFieldMapper();
if (parentFieldMapper.active()) {
DocumentMapper parentTypeDocumentMapper = parseContext.mapperService().documentMapper(parentFieldMapper.type());
parentChildIndexFieldData = parseContext.getForField(parentFieldMapper.fieldType());
if (parentTypeDocumentMapper == null) {
// Only add this if this parentFieldMapper (also a parent) isn't a child of another parent.
parentTypes.add(parentFieldMapper.type());
}
}
}
if (parentChildIndexFieldData == null) {
throw new ParsingException(parseContext, "[has_parent] no _parent field configured");
}
Query parentTypeQuery = null;
if (parentTypes.size() == 1) {
DocumentMapper documentMapper = parseContext.mapperService().documentMapper(parentTypes.iterator().next());
if (documentMapper != null) {
parentTypeQuery = documentMapper.typeFilter();
}
} else {
BooleanQuery.Builder parentsFilter = new BooleanQuery.Builder();
for (String parentTypeStr : parentTypes) {
DocumentMapper documentMapper = parseContext.mapperService().documentMapper(parentTypeStr);
if (documentMapper != null) {
parentsFilter.add(documentMapper.typeFilter(), BooleanClause.Occur.SHOULD);
}
}
parentTypeQuery = parentsFilter.build();
}
if (parentTypeQuery == null) {
return null;
}
// wrap the query with type query
innerQuery = Queries.filtered(innerQuery, parentDocMapper.typeFilter());
Query childrenFilter = Queries.not(parentTypeQuery);
ScoreMode scoreMode = score ? ScoreMode.Max : ScoreMode.None;
return joinUtilHelper(parentType, parentChildIndexFieldData, childrenFilter, scoreMode, innerQuery, 0, Integer.MAX_VALUE);
@Override
public HasParentQueryBuilder getBuilderPrototype() {
return PROTOTYPE;
}
}
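The SCORE_FIELD declaration above is what turns the legacy score_mode parameter into a deprecated alias of score. A hedged sketch of how that ParseField deprecation behaves, assuming the ParseFieldMatcher.EMPTY/STRICT constants of this codebase:

import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;

public class ScoreModeDeprecationSketch {
    public static void main(String[] args) {
        ParseField scoreField = new ParseField("score_mode").withAllDeprecated("score");
        // Lenient matching still accepts the deprecated name.
        System.out.println(ParseFieldMatcher.EMPTY.match("score_mode", scoreField)); // true
        try {
            // Strict matching (index.query.parse.strict) rejects it instead.
            ParseFieldMatcher.STRICT.match("score_mode", scoreField);
        } catch (IllegalArgumentException e) {
            System.out.println("rejected deprecated field: " + e.getMessage());
        }
    }
}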


@ -19,44 +19,60 @@
package org.elasticsearch.index.query;
import org.apache.lucene.queries.TermsQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.*;
/**
* A query that will return only documents matching specific ids (and a type).
*/
public class IdsQueryBuilder extends QueryBuilder implements BoostableQueryBuilder<IdsQueryBuilder> {
public class IdsQueryBuilder extends AbstractQueryBuilder<IdsQueryBuilder> {
private final List<String> types;
public static final String NAME = "ids";
private List<String> values = new ArrayList<>();
private final Set<String> ids = new HashSet<>();
private float boost = -1;
private final String[] types;
private String queryName;
static final IdsQueryBuilder PROTOTYPE = new IdsQueryBuilder();
public IdsQueryBuilder(String... types) {
this.types = types == null ? null : Arrays.asList(types);
/**
* Creates a new IdsQueryBuilder by optionally providing the types of the documents to look for.
*/
public IdsQueryBuilder(@Nullable String... types) {
this.types = types;
}
/**
* Adds ids to the filter.
* Returns the types used in this query.
*/
public String[] types() {
return this.types;
}
/**
* Adds ids to the query.
*/
public IdsQueryBuilder addIds(String... ids) {
values.addAll(Arrays.asList(ids));
Collections.addAll(this.ids, ids);
return this;
}
/**
* Adds ids to the filter.
* Adds ids to the query.
*/
public IdsQueryBuilder addIds(Collection<String> ids) {
values.addAll(ids);
this.ids.addAll(ids);
return this;
}
@ -75,48 +91,78 @@ public class IdsQueryBuilder extends QueryBuilder implements BoostableQueryBuild
}
/**
* Sets the boost for this query. Documents matching this query will (in addition to the normal
* weightings) have their score multiplied by the boost provided.
* Returns the ids for the query.
*/
@Override
public IdsQueryBuilder boost(float boost) {
this.boost = boost;
return this;
}
/**
* Sets the query name for the filter that can be used when searching for matched_filters per hit.
*/
public IdsQueryBuilder queryName(String queryName) {
this.queryName = queryName;
return this;
public Set<String> ids() {
return this.ids;
}
@Override
protected void doXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(IdsQueryParser.NAME);
builder.startObject(NAME);
if (types != null) {
if (types.size() == 1) {
builder.field("type", types.get(0));
if (types.length == 1) {
builder.field("type", types[0]);
} else {
builder.startArray("types");
for (Object type : types) {
builder.value(type);
}
builder.endArray();
builder.array("types", types);
}
}
builder.startArray("values");
for (Object value : values) {
for (String value : ids) {
builder.value(value);
}
builder.endArray();
if (boost != -1) {
builder.field("boost", boost);
}
if (queryName != null) {
builder.field("_name", queryName);
}
printBoostAndQueryName(builder);
builder.endObject();
}
}
@Override
public String getWriteableName() {
return NAME;
}
@Override
protected Query doToQuery(QueryShardContext context) throws IOException {
Query query;
if (this.ids.isEmpty()) {
query = Queries.newMatchNoDocsQuery();
} else {
Collection<String> typesForQuery;
if (types == null || types.length == 0) {
typesForQuery = context.queryTypes();
} else if (types.length == 1 && MetaData.ALL.equals(types[0])) {
typesForQuery = context.mapperService().types();
} else {
typesForQuery = new HashSet<>();
Collections.addAll(typesForQuery, types);
}
query = new TermsQuery(UidFieldMapper.NAME, Uid.createUidsForTypesAndIds(typesForQuery, ids));
}
return query;
}
@Override
protected IdsQueryBuilder doReadFrom(StreamInput in) throws IOException {
IdsQueryBuilder idsQueryBuilder = new IdsQueryBuilder(in.readStringArray());
idsQueryBuilder.addIds(in.readStringArray());
return idsQueryBuilder;
}
@Override
protected void doWriteTo(StreamOutput out) throws IOException {
out.writeStringArray(types);
out.writeStringArray(ids.toArray(new String[ids.size()]));
}
@Override
protected int doHashCode() {
return Objects.hash(ids, Arrays.hashCode(types));
}
@Override
protected boolean doEquals(IdsQueryBuilder other) {
return Objects.equals(ids, other.ids) &&
Arrays.equals(types, other.types);
}
}
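A minimal usage sketch of the ids builder above; the type and id values are made up for illustration:

import org.elasticsearch.index.query.IdsQueryBuilder;

public class IdsUsageSketch {
    public static void main(String[] args) {
        // Restrict the query to a (hypothetical) type and a fixed set of ids;
        // with no types given, the shard context's query types are used instead.
        IdsQueryBuilder query = new IdsQueryBuilder("my_type").addIds("1", "4", "100");
        System.out.println(query); // prints the query as JSON via toXContent
    }
}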


@ -19,48 +19,36 @@
package org.elasticsearch.index.query;
import org.apache.lucene.queries.TermsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.util.iterable.Iterables;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
/**
*
* Parser for ids query
*/
public class IdsQueryParser implements QueryParser {
public static final String NAME = "ids";
@Inject
public IdsQueryParser() {
}
public class IdsQueryParser implements QueryParser<IdsQueryBuilder> {
@Override
public String[] names() {
return new String[]{NAME};
return new String[]{IdsQueryBuilder.NAME};
}
/**
* @return a QueryBuilder representation of the query passed in as XContent in the parse context
*/
@Override
public Query parse(QueryParseContext parseContext) throws IOException, ParsingException {
public IdsQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException {
XContentParser parser = parseContext.parser();
List<BytesRef> ids = new ArrayList<>();
Collection<String> types = null;
String currentFieldName = null;
float boost = 1.0f;
List<String> ids = new ArrayList<>();
List<String> types = new ArrayList<>();
float boost = AbstractQueryBuilder.DEFAULT_BOOST;
String queryName = null;
String currentFieldName = null;
XContentParser.Token token;
boolean idsProvided = false;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
@ -72,27 +60,26 @@ public class IdsQueryParser implements QueryParser {
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
if ((token == XContentParser.Token.VALUE_STRING) ||
(token == XContentParser.Token.VALUE_NUMBER)) {
BytesRef value = parser.utf8BytesOrNull();
if (value == null) {
throw new ParsingException(parseContext, "No value specified for term filter");
String id = parser.textOrNull();
if (id == null) {
throw new ParsingException(parser.getTokenLocation(), "No value specified for term filter");
}
ids.add(value);
ids.add(id);
} else {
throw new ParsingException(parseContext, "Illegal value for id, expecting a string or number, got: "
throw new ParsingException(parser.getTokenLocation(), "Illegal value for id, expecting a string or number, got: "
+ token);
}
}
} else if ("types".equals(currentFieldName) || "type".equals(currentFieldName)) {
types = new ArrayList<>();
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
String value = parser.textOrNull();
if (value == null) {
throw new ParsingException(parseContext, "No type specified for term filter");
throw new ParsingException(parser.getTokenLocation(), "No type specified for term filter");
}
types.add(value);
}
} else {
throw new ParsingException(parseContext, "[ids] query does not support [" + currentFieldName + "]");
throw new ParsingException(parser.getTokenLocation(), "[ids] query does not support [" + currentFieldName + "]");
}
} else if (token.isValue()) {
if ("type".equals(currentFieldName) || "_type".equals(currentFieldName)) {
@ -102,30 +89,22 @@ public class IdsQueryParser implements QueryParser {
} else if ("_name".equals(currentFieldName)) {
queryName = parser.text();
} else {
throw new ParsingException(parseContext, "[ids] query does not support [" + currentFieldName + "]");
throw new ParsingException(parser.getTokenLocation(), "[ids] query does not support [" + currentFieldName + "]");
}
}
}
if (!idsProvided) {
throw new ParsingException(parseContext, "[ids] query, no ids values provided");
throw new ParsingException(parser.getTokenLocation(), "[ids] query, no ids values provided");
}
if (ids.isEmpty()) {
return Queries.newMatchNoDocsQuery();
}
if (types == null || types.isEmpty()) {
types = parseContext.queryTypes();
} else if (types.size() == 1 && Iterables.getFirst(types, null).equals("_all")) {
types = parseContext.mapperService().types();
}
TermsQuery query = new TermsQuery(UidFieldMapper.NAME, Uid.createUidsForTypesAndIds(types, ids));
query.setBoost(boost);
if (queryName != null) {
parseContext.addNamedQuery(queryName, query);
}
IdsQueryBuilder query = new IdsQueryBuilder(types.toArray(new String[types.size()]));
query.addIds(ids.toArray(new String[ids.size()]));
query.boost(boost).queryName(queryName);
return query;
}
@Override
public IdsQueryBuilder getBuilderPrototype() {
return IdsQueryBuilder.PROTOTYPE;
}
}


@ -22,12 +22,17 @@ package org.elasticsearch.index.query;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.CloseableThreadLocal;
import org.elasticsearch.Version;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
@ -40,6 +45,7 @@ import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.internal.AllFieldMapper;
import org.elasticsearch.index.query.support.InnerHitsQueryParserHelper;
import org.elasticsearch.index.settings.IndexSettings;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
@ -51,13 +57,16 @@ public class IndexQueryParserService extends AbstractIndexComponent {
public static final String DEFAULT_FIELD = "index.query.default_field";
public static final String QUERY_STRING_LENIENT = "index.query_string.lenient";
public static final String QUERY_STRING_ANALYZE_WILDCARD = "indices.query.query_string.analyze_wildcard";
public static final String QUERY_STRING_ALLOW_LEADING_WILDCARD = "indices.query.query_string.allowLeadingWildcard";
public static final String PARSE_STRICT = "index.query.parse.strict";
public static final String ALLOW_UNMAPPED = "index.query.parse.allow_unmapped_fields";
private final InnerHitsQueryParserHelper innerHitsQueryParserHelper;
private CloseableThreadLocal<QueryParseContext> cache = new CloseableThreadLocal<QueryParseContext>() {
private CloseableThreadLocal<QueryShardContext> cache = new CloseableThreadLocal<QueryShardContext>() {
@Override
protected QueryParseContext initialValue() {
return new QueryParseContext(index, IndexQueryParserService.this);
protected QueryShardContext initialValue() {
return new QueryShardContext(index, IndexQueryParserService.this);
}
};
@ -71,24 +80,33 @@ public class IndexQueryParserService extends AbstractIndexComponent {
final IndexCache indexCache;
final IndexFieldDataService fieldDataService;
protected IndexFieldDataService fieldDataService;
final ClusterService clusterService;
final IndexNameExpressionResolver indexNameExpressionResolver;
final BitsetFilterCache bitsetFilterCache;
private final IndicesQueriesRegistry indicesQueriesRegistry;
private String defaultField;
private boolean queryStringLenient;
private final String defaultField;
private final boolean queryStringLenient;
private final boolean queryStringAnalyzeWildcard;
private final boolean queryStringAllowLeadingWildcard;
private final ParseFieldMatcher parseFieldMatcher;
private final boolean defaultAllowUnmappedFields;
private final Client client;
@Inject
public IndexQueryParserService(Index index, @IndexSettings Settings indexSettings,
public IndexQueryParserService(Index index, @IndexSettings Settings indexSettings, Settings settings,
IndicesQueriesRegistry indicesQueriesRegistry,
ScriptService scriptService, AnalysisService analysisService,
MapperService mapperService, IndexCache indexCache, IndexFieldDataService fieldDataService,
BitsetFilterCache bitsetFilterCache,
@Nullable SimilarityService similarityService) {
@Nullable SimilarityService similarityService, ClusterService clusterService,
IndexNameExpressionResolver indexNameExpressionResolver,
InnerHitsQueryParserHelper innerHitsQueryParserHelper, Client client) {
super(index, indexSettings);
this.scriptService = scriptService;
this.analysisService = analysisService;
@ -97,12 +115,18 @@ public class IndexQueryParserService extends AbstractIndexComponent {
this.indexCache = indexCache;
this.fieldDataService = fieldDataService;
this.bitsetFilterCache = bitsetFilterCache;
this.clusterService = clusterService;
this.indexNameExpressionResolver = indexNameExpressionResolver;
this.defaultField = indexSettings.get(DEFAULT_FIELD, AllFieldMapper.NAME);
this.queryStringLenient = indexSettings.getAsBoolean(QUERY_STRING_LENIENT, false);
this.queryStringAnalyzeWildcard = settings.getAsBoolean(QUERY_STRING_ANALYZE_WILDCARD, false);
this.queryStringAllowLeadingWildcard = settings.getAsBoolean(QUERY_STRING_ALLOW_LEADING_WILDCARD, true);
this.parseFieldMatcher = new ParseFieldMatcher(indexSettings);
this.defaultAllowUnmappedFields = indexSettings.getAsBoolean(ALLOW_UNMAPPED, true);
this.indicesQueriesRegistry = indicesQueriesRegistry;
this.innerHitsQueryParserHelper = innerHitsQueryParserHelper;
this.client = client;
}
public void close() {
@ -113,56 +137,24 @@ public class IndexQueryParserService extends AbstractIndexComponent {
return this.defaultField;
}
public boolean queryStringAnalyzeWildcard() {
return this.queryStringAnalyzeWildcard;
}
public boolean queryStringAllowLeadingWildcard() {
return this.queryStringAllowLeadingWildcard;
}
public boolean queryStringLenient() {
return this.queryStringLenient;
}
public QueryParser queryParser(String name) {
return indicesQueriesRegistry.queryParsers().get(name);
}
public ParsedQuery parse(QueryBuilder queryBuilder) {
XContentParser parser = null;
try {
BytesReference bytes = queryBuilder.buildAsBytes();
parser = XContentFactory.xContent(bytes).createParser(bytes);
return parse(cache.get(), parser);
} catch (ParsingException e) {
throw e;
} catch (Exception e) {
throw new ParsingException(getParseContext(), "Failed to parse", e);
} finally {
if (parser != null) {
parser.close();
}
}
}
public ParsedQuery parse(byte[] source) {
return parse(source, 0, source.length);
}
public ParsedQuery parse(byte[] source, int offset, int length) {
XContentParser parser = null;
try {
parser = XContentFactory.xContent(source, offset, length).createParser(source, offset, length);
return parse(cache.get(), parser);
} catch (ParsingException e) {
throw e;
} catch (Exception e) {
throw new ParsingException(getParseContext(), "Failed to parse", e);
} finally {
if (parser != null) {
parser.close();
}
}
IndicesQueriesRegistry indicesQueriesRegistry() {
return indicesQueriesRegistry;
}
public ParsedQuery parse(BytesReference source) {
return parse(cache.get(), source);
}
public ParsedQuery parse(QueryParseContext context, BytesReference source) {
QueryShardContext context = cache.get();
XContentParser parser = null;
try {
parser = XContentFactory.xContent(source).createParser(source);
@ -170,23 +162,7 @@ public class IndexQueryParserService extends AbstractIndexComponent {
} catch (ParsingException e) {
throw e;
} catch (Exception e) {
throw new ParsingException(context, "Failed to parse", e);
} finally {
if (parser != null) {
parser.close();
}
}
}
public ParsedQuery parse(String source) throws ParsingException {
XContentParser parser = null;
try {
parser = XContentFactory.xContent(source).createParser(source);
return innerParse(cache.get(), parser);
} catch (ParsingException e) {
throw e;
} catch (Exception e) {
throw new ParsingException(getParseContext(), "Failed to parse [" + source + "]", e);
throw new ParsingException(parser == null ? null : parser.getTokenLocation(), "Failed to parse", e);
} finally {
if (parser != null) {
parser.close();
@ -195,14 +171,10 @@ public class IndexQueryParserService extends AbstractIndexComponent {
}
public ParsedQuery parse(XContentParser parser) {
return parse(cache.get(), parser);
}
public ParsedQuery parse(QueryParseContext context, XContentParser parser) {
try {
return innerParse(context, parser);
} catch (IOException e) {
throw new ParsingException(context, "Failed to parse", e);
return innerParse(cache.get(), parser);
} catch(IOException e) {
throw new ParsingException(parser.getTokenLocation(), "Failed to parse", e);
}
}
@ -211,10 +183,11 @@ public class IndexQueryParserService extends AbstractIndexComponent {
*/
@Nullable
public ParsedQuery parseInnerFilter(XContentParser parser) throws IOException {
QueryParseContext context = cache.get();
QueryShardContext context = cache.get();
context.reset(parser);
try {
Query filter = context.parseInnerFilter();
context.parseFieldMatcher(parseFieldMatcher);
Query filter = context.parseContext().parseInnerQueryBuilder().toFilter(context);
if (filter == null) {
return null;
}
@ -225,27 +198,15 @@ public class IndexQueryParserService extends AbstractIndexComponent {
}
@Nullable
public Query parseInnerQuery(XContentParser parser) throws IOException {
QueryParseContext context = cache.get();
context.reset(parser);
try {
return context.parseInnerQuery();
} finally {
context.reset(null);
}
}
@Nullable
public Query parseInnerQuery(QueryParseContext parseContext) throws IOException {
parseContext.parseFieldMatcher(parseFieldMatcher);
Query query = parseContext.parseInnerQuery();
public Query parseInnerQuery(QueryShardContext context) throws IOException {
Query query = context.parseContext().parseInnerQueryBuilder().toQuery(context);
if (query == null) {
query = Queries.newMatchNoDocsQuery();
}
return query;
}
public QueryParseContext getParseContext() {
public QueryShardContext getShardContext() {
return cache.get();
}
@ -264,9 +225,10 @@ public class IndexQueryParserService extends AbstractIndexComponent {
* Selectively parses a query from a top level query or query_binary json field from the specified source.
*/
public ParsedQuery parseQuery(BytesReference source) {
XContentParser parser = null;
try {
parser = XContentHelper.createParser(source);
ParsedQuery parsedQuery = null;
XContentParser parser = XContentHelper.createParser(source);
for (XContentParser.Token token = parser.nextToken(); token != XContentParser.Token.END_OBJECT; token = parser.nextToken()) {
if (token == XContentParser.Token.FIELD_NAME) {
String fieldName = parser.currentName();
@ -277,37 +239,54 @@ public class IndexQueryParserService extends AbstractIndexComponent {
XContentParser qSourceParser = XContentFactory.xContent(querySource).createParser(querySource);
parsedQuery = parse(qSourceParser);
} else {
throw new ParsingException(getParseContext(), "request does not support [" + fieldName + "]");
throw new ParsingException(parser.getTokenLocation(), "request does not support [" + fieldName + "]");
}
}
}
if (parsedQuery != null) {
return parsedQuery;
if (parsedQuery == null) {
throw new ParsingException(parser.getTokenLocation(), "Required query is missing");
}
} catch (ParsingException e) {
return parsedQuery;
} catch (ParsingException | QueryShardException e) {
throw e;
} catch (Throwable e) {
throw new ParsingException(getParseContext(), "Failed to parse", e);
throw new ParsingException(parser == null ? null : parser.getTokenLocation(), "Failed to parse", e);
}
throw new ParsingException(getParseContext(), "Required query is missing");
}
private ParsedQuery innerParse(QueryParseContext parseContext, XContentParser parser) throws IOException, ParsingException {
parseContext.reset(parser);
private ParsedQuery innerParse(QueryShardContext context, XContentParser parser) throws IOException, QueryShardException {
context.reset(parser);
try {
parseContext.parseFieldMatcher(parseFieldMatcher);
Query query = parseContext.parseInnerQuery();
context.parseFieldMatcher(parseFieldMatcher);
Query query = context.parseContext().parseInnerQueryBuilder().toQuery(context);
if (query == null) {
query = Queries.newMatchNoDocsQuery();
}
return new ParsedQuery(query, parseContext.copyNamedQueries());
return new ParsedQuery(query, context.copyNamedQueries());
} finally {
parseContext.reset(null);
context.reset(null);
}
}
public ParseFieldMatcher parseFieldMatcher() {
return parseFieldMatcher;
}
public boolean matchesIndices(String... indices) {
final String[] concreteIndices = indexNameExpressionResolver.concreteIndices(clusterService.state(), IndicesOptions.lenientExpandOpen(), indices);
for (String index : concreteIndices) {
if (Regex.simpleMatch(index, this.index.name())) {
return true;
}
}
return false;
}
public InnerHitsQueryParserHelper getInnerHitsQueryParserHelper() {
return innerHitsQueryParserHelper;
}
public Client getClient() {
return client;
}
}
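matchesIndices resolves the given expressions to concrete index names and then compares each against this index with Regex.simpleMatch, which supports only the '*' wildcard. A small sketch of that matching; the index names are hypothetical:

import org.elasticsearch.common.regex.Regex;

public class SimpleMatchSketch {
    public static void main(String[] args) {
        // Regex.simpleMatch treats '*' as "any substring"; everything else is literal.
        System.out.println(Regex.simpleMatch("logs-*", "logs-2015-09-25")); // true
        System.out.println(Regex.simpleMatch("logs-*", "metrics-2015"));    // false
        System.out.println(Regex.simpleMatch("logs-2015", "logs-2015"));    // true (exact)
    }
}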


@ -19,69 +19,133 @@
package org.elasticsearch.index.query;
import org.apache.lucene.search.Query;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException;
import java.util.Arrays;
import java.util.Objects;
/**
* A query that will execute the wrapped query only for the specified indices, and "match_all" when
* it does not match those indices (by default).
*/
public class IndicesQueryBuilder extends QueryBuilder {
public class IndicesQueryBuilder extends AbstractQueryBuilder<IndicesQueryBuilder> {
private final QueryBuilder queryBuilder;
public static final String NAME = "indices";
private final QueryBuilder innerQuery;
private final String[] indices;
private String sNoMatchQuery;
private QueryBuilder noMatchQuery;
private QueryBuilder noMatchQuery = defaultNoMatchQuery();
private String queryName;
static final IndicesQueryBuilder PROTOTYPE = new IndicesQueryBuilder(EmptyQueryBuilder.PROTOTYPE, "index");
public IndicesQueryBuilder(QueryBuilder queryBuilder, String... indices) {
this.queryBuilder = queryBuilder;
public IndicesQueryBuilder(QueryBuilder innerQuery, String... indices) {
if (innerQuery == null) {
throw new IllegalArgumentException("inner query cannot be null");
}
if (indices == null || indices.length == 0) {
throw new IllegalArgumentException("list of indices cannot be null or empty");
}
this.innerQuery = Objects.requireNonNull(innerQuery);
this.indices = indices;
}
/**
* Sets the no match query, can either be <tt>all</tt> or <tt>none</tt>.
*/
public IndicesQueryBuilder noMatchQuery(String type) {
this.sNoMatchQuery = type;
return this;
public QueryBuilder innerQuery() {
return this.innerQuery;
}
public String[] indices() {
return this.indices;
}
/**
* Sets the query to use when it executes on an index that does not match the indices provided.
*/
public IndicesQueryBuilder noMatchQuery(QueryBuilder noMatchQuery) {
if (noMatchQuery == null) {
throw new IllegalArgumentException("noMatch query cannot be null");
}
this.noMatchQuery = noMatchQuery;
return this;
}
/**
* Sets the query name for the filter that can be used when searching for matched_filters per hit.
* Sets the no match query; it can be either <tt>all</tt> or <tt>none</tt>.
*/
public IndicesQueryBuilder queryName(String queryName) {
this.queryName = queryName;
public IndicesQueryBuilder noMatchQuery(String type) {
this.noMatchQuery = IndicesQueryParser.parseNoMatchQuery(type);
return this;
}
public QueryBuilder noMatchQuery() {
return this.noMatchQuery;
}
static QueryBuilder defaultNoMatchQuery() {
return QueryBuilders.matchAllQuery();
}
@Override
protected void doXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(IndicesQueryParser.NAME);
builder.startObject(NAME);
builder.field("indices", indices);
builder.field("query");
queryBuilder.toXContent(builder, params);
if (noMatchQuery != null) {
builder.field("no_match_query");
noMatchQuery.toXContent(builder, params);
} else if (sNoMatchQuery != null) {
builder.field("no_match_query", sNoMatchQuery);
}
if (queryName != null) {
builder.field("_name", queryName);
}
innerQuery.toXContent(builder, params);
builder.field("no_match_query");
noMatchQuery.toXContent(builder, params);
printBoostAndQueryName(builder);
builder.endObject();
}
}
@Override
public String getWriteableName() {
return NAME;
}
@Override
protected Query doToQuery(QueryShardContext context) throws IOException {
if (context.matchesIndices(indices)) {
return innerQuery.toQuery(context);
}
return noMatchQuery.toQuery(context);
}
@Override
protected void setFinalBoost(Query query) {
if (boost != DEFAULT_BOOST) {
//if both the wrapped query and the wrapper hold a boost, the main one coming from the wrapper wins
query.setBoost(boost);
}
}
@Override
protected IndicesQueryBuilder doReadFrom(StreamInput in) throws IOException {
IndicesQueryBuilder indicesQueryBuilder = new IndicesQueryBuilder(in.readQuery(), in.readStringArray());
indicesQueryBuilder.noMatchQuery = in.readQuery();
return indicesQueryBuilder;
}
@Override
protected void doWriteTo(StreamOutput out) throws IOException {
out.writeQuery(innerQuery);
out.writeStringArray(indices);
out.writeQuery(noMatchQuery);
}
@Override
public int doHashCode() {
return Objects.hash(innerQuery, noMatchQuery, Arrays.hashCode(indices));
}
@Override
protected boolean doEquals(IndicesQueryBuilder other) {
return Objects.equals(innerQuery, other.innerQuery) &&
Arrays.equals(indices, other.indices) && // otherwise we are comparing pointers
Objects.equals(noMatchQuery, other.noMatchQuery);
}
}
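A minimal usage sketch of the indices builder above; the index pattern and field names are illustrative:

import org.elasticsearch.index.query.IndicesQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;

public class IndicesUsageSketch {
    public static void main(String[] args) {
        // Apply a term query only on indices matching "logs-2015-*"; documents in
        // other indices fall back to the no-match query ("none" matches no docs;
        // the default is match_all).
        IndicesQueryBuilder query =
                new IndicesQueryBuilder(QueryBuilders.termQuery("level", "error"), "logs-2015-*")
                        .noMatchQuery("none");
        System.out.println(query); // prints the query as JSON via toXContent
    }
}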


@ -19,147 +19,107 @@
package org.elasticsearch.index.query;
import org.apache.lucene.search.Query;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.support.XContentStructure;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
/**
* Parser for {@link IndicesQueryBuilder}.
*/
public class IndicesQueryParser implements QueryParser {
public static final String NAME = "indices";
private static final ParseField QUERY_FIELD = new ParseField("query", "filter");
private static final ParseField NO_MATCH_QUERY = new ParseField("no_match_query", "no_match_filter");
@Nullable
private final ClusterService clusterService;
private final IndexNameExpressionResolver indexNameExpressionResolver;
@Inject
public IndicesQueryParser(@Nullable ClusterService clusterService, IndexNameExpressionResolver indexNameExpressionResolver) {
this.clusterService = clusterService;
this.indexNameExpressionResolver = indexNameExpressionResolver;
}
@Override
public String[] names() {
return new String[]{NAME};
return new String[]{IndicesQueryBuilder.NAME};
}
@Override
public Query parse(QueryParseContext parseContext) throws IOException, ParsingException {
public QueryBuilder fromXContent(QueryParseContext parseContext) throws IOException, ParsingException {
XContentParser parser = parseContext.parser();
Query noMatchQuery = null;
boolean queryFound = false;
boolean indicesFound = false;
boolean currentIndexMatchesIndices = false;
QueryBuilder innerQuery = null;
Collection<String> indices = new ArrayList<>();
QueryBuilder noMatchQuery = IndicesQueryBuilder.defaultNoMatchQuery();
String queryName = null;
float boost = AbstractQueryBuilder.DEFAULT_BOOST;
String currentFieldName = null;
XContentParser.Token token;
XContentStructure.InnerQuery innerQuery = null;
XContentStructure.InnerQuery innerNoMatchQuery = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_OBJECT) {
if (parseContext.parseFieldMatcher().match(currentFieldName, QUERY_FIELD)) {
innerQuery = new XContentStructure.InnerQuery(parseContext, (String[])null);
queryFound = true;
innerQuery = parseContext.parseInnerQueryBuilder();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, NO_MATCH_QUERY)) {
innerNoMatchQuery = new XContentStructure.InnerQuery(parseContext, (String[])null);
noMatchQuery = parseContext.parseInnerQueryBuilder();
} else {
throw new ParsingException(parseContext, "[indices] query does not support [" + currentFieldName + "]");
throw new ParsingException(parser.getTokenLocation(), "[indices] query does not support [" + currentFieldName + "]");
}
} else if (token == XContentParser.Token.START_ARRAY) {
if ("indices".equals(currentFieldName)) {
if (indicesFound) {
throw new ParsingException(parseContext, "[indices] indices or index already specified");
if (indices.isEmpty() == false) {
throw new ParsingException(parser.getTokenLocation(), "[indices] indices or index already specified");
}
indicesFound = true;
Collection<String> indices = new ArrayList<>();
while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
String value = parser.textOrNull();
if (value == null) {
throw new ParsingException(parseContext, "[indices] no value specified for 'indices' entry");
throw new ParsingException(parser.getTokenLocation(), "[indices] no value specified for 'indices' entry");
}
indices.add(value);
}
currentIndexMatchesIndices = matchesIndices(parseContext.index().name(), indices.toArray(new String[indices.size()]));
} else {
throw new ParsingException(parseContext, "[indices] query does not support [" + currentFieldName + "]");
throw new ParsingException(parser.getTokenLocation(), "[indices] query does not support [" + currentFieldName + "]");
}
} else if (token.isValue()) {
if ("index".equals(currentFieldName)) {
if (indicesFound) {
throw new ParsingException(parseContext, "[indices] indices or index already specified");
if (indices.isEmpty() == false) {
throw new ParsingException(parser.getTokenLocation(), "[indices] indices or index already specified");
}
indicesFound = true;
currentIndexMatchesIndices = matchesIndices(parseContext.index().name(), parser.text());
indices.add(parser.text());
} else if (parseContext.parseFieldMatcher().match(currentFieldName, NO_MATCH_QUERY)) {
String type = parser.text();
if ("all".equals(type)) {
noMatchQuery = Queries.newMatchAllQuery();
} else if ("none".equals(type)) {
noMatchQuery = Queries.newMatchNoDocsQuery();
}
noMatchQuery = parseNoMatchQuery(parser.text());
} else if ("_name".equals(currentFieldName)) {
queryName = parser.text();
} else if ("boost".equals(currentFieldName)) {
boost = parser.floatValue();
} else {
throw new ParsingException(parseContext, "[indices] query does not support [" + currentFieldName + "]");
throw new ParsingException(parser.getTokenLocation(), "[indices] query does not support [" + currentFieldName + "]");
}
}
}
if (!queryFound) {
throw new ParsingException(parseContext, "[indices] requires 'query' element");
}
if (!indicesFound) {
throw new ParsingException(parseContext, "[indices] requires 'indices' or 'index' element");
}
Query chosenQuery;
if (currentIndexMatchesIndices) {
chosenQuery = innerQuery.asQuery();
} else {
// If noMatchQuery is set, it means "no_match_query" was "all" or "none"
if (noMatchQuery != null) {
chosenQuery = noMatchQuery;
} else {
// There might be no "no_match_query" set, so default to the match_all if not set
if (innerNoMatchQuery == null) {
chosenQuery = Queries.newMatchAllQuery();
} else {
chosenQuery = innerNoMatchQuery.asQuery();
}
}
if (innerQuery == null) {
throw new ParsingException(parser.getTokenLocation(), "[indices] requires 'query' element");
}
if (queryName != null) {
parseContext.addNamedQuery(queryName, chosenQuery);
if (indices.isEmpty()) {
throw new ParsingException(parser.getTokenLocation(), "[indices] requires 'indices' or 'index' element");
}
return chosenQuery;
return new IndicesQueryBuilder(innerQuery, indices.toArray(new String[indices.size()]))
.noMatchQuery(noMatchQuery)
.boost(boost)
.queryName(queryName);
}
protected boolean matchesIndices(String currentIndex, String... indices) {
final String[] concreteIndices = indexNameExpressionResolver.concreteIndices(clusterService.state(), IndicesOptions.lenientExpandOpen(), indices);
for (String index : concreteIndices) {
if (Regex.simpleMatch(index, currentIndex)) {
return true;
}
static QueryBuilder parseNoMatchQuery(String type) {
if ("all".equals(type)) {
return QueryBuilders.matchAllQuery();
} else if ("none".equals(type)) {
return new MatchNoneQueryBuilder();
}
return false;
throw new IllegalArgumentException("query type can only be [all] or [none] but not " + "[" + type + "]");
}
@Override
public IndicesQueryBuilder getBuilderPrototype() {
return IndicesQueryBuilder.PROTOTYPE;
}
}


@ -19,6 +19,10 @@
package org.elasticsearch.index.query;
import org.apache.lucene.search.Query;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException;
@ -26,26 +30,46 @@ import java.io.IOException;
/**
* A query that matches on all documents.
*/
public class MatchAllQueryBuilder extends QueryBuilder implements BoostableQueryBuilder<MatchAllQueryBuilder> {
public class MatchAllQueryBuilder extends AbstractQueryBuilder<MatchAllQueryBuilder> {
private float boost = -1;
public static final String NAME = "match_all";
/**
* Sets the boost for this query. Documents matching this query will (in addition to the normal
* weightings) have their score multiplied by the boost provided.
*/
@Override
public MatchAllQueryBuilder boost(float boost) {
this.boost = boost;
return this;
}
static final MatchAllQueryBuilder PROTOTYPE = new MatchAllQueryBuilder();
@Override
public void doXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(MatchAllQueryParser.NAME);
if (boost != -1) {
builder.field("boost", boost);
}
protected void doXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(NAME);
printBoostAndQueryName(builder);
builder.endObject();
}
}
@Override
protected Query doToQuery(QueryShardContext context) throws IOException {
return Queries.newMatchAllQuery();
}
@Override
protected boolean doEquals(MatchAllQueryBuilder other) {
return true;
}
@Override
protected int doHashCode() {
return 0;
}
@Override
protected MatchAllQueryBuilder doReadFrom(StreamInput in) throws IOException {
return new MatchAllQueryBuilder();
}
@Override
protected void doWriteTo(StreamOutput out) throws IOException {
//nothing to write really
}
@Override
public String getWriteableName() {
return NAME;
}
}


@ -19,58 +19,51 @@
package org.elasticsearch.index.query;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
/**
*
* Parser for match_all query
*/
public class MatchAllQueryParser implements QueryParser {
public static final String NAME = "match_all";
@Inject
public MatchAllQueryParser() {
}
public class MatchAllQueryParser implements QueryParser<MatchAllQueryBuilder> {
@Override
public String[] names() {
return new String[]{NAME, Strings.toCamelCase(NAME)};
return new String[]{MatchAllQueryBuilder.NAME, Strings.toCamelCase(MatchAllQueryBuilder.NAME)};
}
@Override
public Query parse(QueryParseContext parseContext) throws IOException, ParsingException {
public MatchAllQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException {
XContentParser parser = parseContext.parser();
float boost = 1.0f;
String currentFieldName = null;
XContentParser.Token token;
String queryName = null;
float boost = AbstractQueryBuilder.DEFAULT_BOOST;
while (((token = parser.nextToken()) != XContentParser.Token.END_OBJECT && token != XContentParser.Token.END_ARRAY)) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token.isValue()) {
if ("boost".equals(currentFieldName)) {
if ("_name".equals(currentFieldName)) {
queryName = parser.text();
} else if ("boost".equals(currentFieldName)) {
boost = parser.floatValue();
} else {
throw new ParsingException(parseContext, "[match_all] query does not support [" + currentFieldName + "]");
throw new ParsingException(parser.getTokenLocation(), "[match_all] query does not support [" + currentFieldName + "]");
}
}
}
if (boost == 1.0f) {
return Queries.newMatchAllQuery();
}
MatchAllDocsQuery query = new MatchAllDocsQuery();
query.setBoost(boost);
return query;
MatchAllQueryBuilder queryBuilder = new MatchAllQueryBuilder();
queryBuilder.boost(boost);
queryBuilder.queryName(queryName);
return queryBuilder;
}
}
@Override
public MatchAllQueryBuilder getBuilderPrototype() {
return MatchAllQueryBuilder.PROTOTYPE;
}
}


@ -0,0 +1,79 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.query;
import org.apache.lucene.search.Query;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException;
/**
* A query that matches no document.
*/
public class MatchNoneQueryBuilder extends AbstractQueryBuilder<MatchNoneQueryBuilder> {
public static final String NAME = "match_none";
public static final MatchNoneQueryBuilder PROTOTYPE = new MatchNoneQueryBuilder();
@Override
protected void doXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(NAME);
builder.endObject();
}
@Override
protected Query doToQuery(QueryShardContext context) throws IOException {
return Queries.newMatchNoDocsQuery();
}
@Override
protected void setFinalBoost(Query query) {
//no-op this query doesn't support boost
}
@Override
protected boolean doEquals(MatchNoneQueryBuilder other) {
return true;
}
@Override
protected int doHashCode() {
return 0;
}
@Override
protected MatchNoneQueryBuilder doReadFrom(StreamInput in) throws IOException {
return new MatchNoneQueryBuilder();
}
@Override
protected void doWriteTo(StreamOutput out) throws IOException {
//nothing to write really
}
@Override
public String getWriteableName() {
return NAME;
}
}
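match_none is the smallest of the refactored builders and makes a convenient example of the prototype serialization pattern they all share. A hedged sketch of the round-trip, assuming the BytesStreamOutput and StreamInput.wrap helpers of this codebase:

import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.index.query.MatchNoneQueryBuilder;

public class MatchNoneRoundTripSketch {
    public static void main(String[] args) throws Exception {
        MatchNoneQueryBuilder original = new MatchNoneQueryBuilder();
        // writeTo() serializes the (empty) builder state plus boost and query name.
        BytesStreamOutput out = new BytesStreamOutput();
        original.writeTo(out);
        // The transport layer deserializes through the shared PROTOTYPE instance.
        StreamInput in = StreamInput.wrap(out.bytes());
        MatchNoneQueryBuilder copy = MatchNoneQueryBuilder.PROTOTYPE.readFrom(in);
        System.out.println(original.equals(copy)); // true
    }
}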
