Merge pull request #12527 from cbuescher/feature/query-refactoring-queryCreationContext

Separating QueryParseContext and QueryShardContext
This commit is contained in:
Christoph Büscher 2015-08-05 16:24:50 +02:00
commit 07c2e48e96
148 changed files with 1204 additions and 969 deletions

View File

@@ -22,7 +22,7 @@ package org.apache.lucene.queryparser.classic;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.index.query.ExistsQueryBuilder;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext;
/**
*
@@ -32,7 +32,7 @@ public class ExistsFieldQueryExtension implements FieldQueryExtension {
public static final String NAME = "_exists_";
@Override
public Query query(QueryParseContext parseContext, String queryText) {
return new ConstantScoreQuery(ExistsQueryBuilder.newFilter(parseContext, queryText));
public Query query(QueryShardContext context, String queryText) {
return new ConstantScoreQuery(ExistsQueryBuilder.newFilter(context, queryText));
}
}

View File

@@ -20,12 +20,12 @@
package org.apache.lucene.queryparser.classic;
import org.apache.lucene.search.Query;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext;
/**
*
*/
public interface FieldQueryExtension {
Query query(QueryParseContext parseContext, String queryText);
Query query(QueryShardContext context, String queryText);
}

View File

@@ -39,7 +39,7 @@ import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.core.DateFieldMapper;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.support.QueryParsers;
import com.google.common.base.Objects;
@@ -70,7 +70,7 @@ public class MapperQueryParser extends QueryParser {
.build();
}
private final QueryParseContext parseContext;
private final QueryShardContext context;
private QueryParserSettings settings;
@@ -85,15 +85,9 @@ public class MapperQueryParser extends QueryParser {
private String quoteFieldSuffix;
public MapperQueryParser(QueryParseContext parseContext) {
public MapperQueryParser(QueryShardContext context) {
super(null, null);
this.parseContext = parseContext;
}
public MapperQueryParser(QueryParserSettings settings, QueryParseContext parseContext) {
super(settings.defaultField(), settings.defaultAnalyzer());
this.parseContext = parseContext;
reset(settings);
this.context = context;
}
public void reset(QueryParserSettings settings) {
@@ -168,7 +162,7 @@ public class MapperQueryParser extends QueryParser {
public Query getFieldQuery(String field, String queryText, boolean quoted) throws ParseException {
FieldQueryExtension fieldQueryExtension = fieldQueryExtensions.get(field);
if (fieldQueryExtension != null) {
return fieldQueryExtension.query(parseContext, queryText);
return fieldQueryExtension.query(context, queryText);
}
Collection<String> fields = extractMultiFields(field);
if (fields != null) {
@@ -232,27 +226,27 @@
if (quoted) {
setAnalyzer(quoteAnalyzer);
if (quoteFieldSuffix != null) {
currentFieldType = parseContext.fieldMapper(field + quoteFieldSuffix);
currentFieldType = context.fieldMapper(field + quoteFieldSuffix);
}
}
if (currentFieldType == null) {
currentFieldType = parseContext.fieldMapper(field);
currentFieldType = context.fieldMapper(field);
}
if (currentFieldType != null) {
if (quoted) {
if (!forcedQuoteAnalyzer) {
setAnalyzer(parseContext.getSearchQuoteAnalyzer(currentFieldType));
setAnalyzer(context.getSearchQuoteAnalyzer(currentFieldType));
}
} else {
if (!forcedAnalyzer) {
setAnalyzer(parseContext.getSearchAnalyzer(currentFieldType));
setAnalyzer(context.getSearchAnalyzer(currentFieldType));
}
}
if (currentFieldType != null) {
Query query = null;
if (currentFieldType.useTermQueryWithQueryString()) {
try {
query = currentFieldType.termQuery(queryText, parseContext);
query = currentFieldType.termQuery(queryText, context);
} catch (RuntimeException e) {
if (settings.lenient()) {
return null;
@@ -363,7 +357,7 @@ public class MapperQueryParser extends QueryParser {
}
private Query getRangeQuerySingle(String field, String part1, String part2, boolean startInclusive, boolean endInclusive) {
currentFieldType = parseContext.fieldMapper(field);
currentFieldType = context.fieldMapper(field);
if (currentFieldType != null) {
if (lowercaseExpandedTerms && !currentFieldType.isNumeric()) {
part1 = part1 == null ? null : part1.toLowerCase(locale);
@@ -428,7 +422,7 @@
}
private Query getFuzzyQuerySingle(String field, String termStr, String minSimilarity) throws ParseException {
currentFieldType = parseContext.fieldMapper(field);
currentFieldType = context.fieldMapper(field);
if (currentFieldType != null) {
try {
return currentFieldType.fuzzyQuery(termStr, Fuzziness.build(minSimilarity), fuzzyPrefixLength, settings.fuzzyMaxExpansions(), FuzzyQuery.defaultTranspositions);
@@ -498,14 +492,14 @@
currentFieldType = null;
Analyzer oldAnalyzer = getAnalyzer();
try {
currentFieldType = parseContext.fieldMapper(field);
currentFieldType = context.fieldMapper(field);
if (currentFieldType != null) {
if (!forcedAnalyzer) {
setAnalyzer(parseContext.getSearchAnalyzer(currentFieldType));
setAnalyzer(context.getSearchAnalyzer(currentFieldType));
}
Query query = null;
if (currentFieldType.useTermQueryWithQueryString()) {
query = currentFieldType.prefixQuery(termStr, multiTermRewriteMethod, parseContext);
query = currentFieldType.prefixQuery(termStr, multiTermRewriteMethod, context);
}
if (query == null) {
query = getPossiblyAnalyzedPrefixQuery(currentFieldType.names().indexName(), termStr);
@@ -590,7 +584,7 @@
return newMatchAllDocsQuery();
}
// effectively, we check if a field exists or not
return fieldQueryExtensions.get(ExistsFieldQueryExtension.NAME).query(parseContext, actualField);
return fieldQueryExtensions.get(ExistsFieldQueryExtension.NAME).query(context, actualField);
}
}
if (lowercaseExpandedTerms) {
@@ -639,10 +633,10 @@
currentFieldType = null;
Analyzer oldAnalyzer = getAnalyzer();
try {
currentFieldType = parseContext.fieldMapper(field);
currentFieldType = context.fieldMapper(field);
if (currentFieldType != null) {
if (!forcedAnalyzer) {
setAnalyzer(parseContext.getSearchAnalyzer(currentFieldType));
setAnalyzer(context.getSearchAnalyzer(currentFieldType));
}
indexedNameField = currentFieldType.names().indexName();
return getPossiblyAnalyzedWildcardQuery(indexedNameField, termStr);
@@ -780,14 +774,14 @@ public class MapperQueryParser extends QueryParser {
currentFieldType = null;
Analyzer oldAnalyzer = getAnalyzer();
try {
currentFieldType = parseContext.fieldMapper(field);
currentFieldType = context.fieldMapper(field);
if (currentFieldType != null) {
if (!forcedAnalyzer) {
setAnalyzer(parseContext.getSearchAnalyzer(currentFieldType));
setAnalyzer(context.getSearchAnalyzer(currentFieldType));
}
Query query = null;
if (currentFieldType.useTermQueryWithQueryString()) {
query = currentFieldType.regexpQuery(termStr, RegExp.ALL, maxDeterminizedStates, multiTermRewriteMethod, parseContext);
query = currentFieldType.regexpQuery(termStr, RegExp.ALL, maxDeterminizedStates, multiTermRewriteMethod, context);
}
if (query == null) {
query = super.getRegexpQuery(field, termStr);
@@ -835,7 +829,7 @@
private Collection<String> extractMultiFields(String field) {
Collection<String> fields = null;
if (field != null) {
fields = parseContext.simpleMatchToIndexNames(field);
fields = context.simpleMatchToIndexNames(field);
} else {
fields = settings.fields();
}

View File

@@ -22,8 +22,7 @@ package org.apache.lucene.queryparser.classic;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.index.query.MissingQueryBuilder;
import org.elasticsearch.index.query.MissingQueryParser;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext;
/**
*
@@ -33,8 +32,8 @@ public class MissingFieldQueryExtension implements FieldQueryExtension {
public static final String NAME = "_missing_";
@Override
public Query query(QueryParseContext parseContext, String queryText) {
Query query = MissingQueryBuilder.newFilter(parseContext, queryText, MissingQueryBuilder.DEFAULT_EXISTENCE_VALUE, MissingQueryBuilder.DEFAULT_NULL_VALUE);
public Query query(QueryShardContext context, String queryText) {
Query query = MissingQueryBuilder.newFilter(context, queryText, MissingQueryBuilder.DEFAULT_EXISTENCE_VALUE, MissingQueryBuilder.DEFAULT_NULL_VALUE);
if (query != null) {
return new ConstantScoreQuery(query);
}

View File

@@ -572,6 +572,7 @@ public class ElasticsearchException extends RuntimeException implements ToXConte
org.elasticsearch.index.engine.RecoveryEngineException.class,
org.elasticsearch.common.blobstore.BlobStoreException.class,
org.elasticsearch.index.snapshots.IndexShardRestoreException.class,
org.elasticsearch.index.query.QueryShardException.class,
org.elasticsearch.index.query.QueryParsingException.class,
org.elasticsearch.action.support.replication.TransportReplicationAction.RetryOnPrimaryException.class,
org.elasticsearch.index.engine.DeleteByQueryFailedEngineException.class,

View File

@@ -42,6 +42,7 @@ import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.query.IndexQueryParserService;
import org.elasticsearch.index.query.QueryShardException;
import org.elasticsearch.index.query.QueryParsingException;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.indices.IndicesService;
@@ -190,7 +191,7 @@ public class TransportValidateQueryAction extends TransportBroadcastAction<Valid
if (request.rewrite()) {
explanation = getRewrittenQuery(searcher.searcher(), searchContext.query());
}
} catch (QueryParsingException e) {
} catch (QueryShardException|QueryParsingException e) {
valid = false;
error = e.getDetailedMessage();
} catch (AssertionError|IOException e) {

View File

@@ -41,7 +41,7 @@ import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.script.ScriptService;
@@ -166,10 +166,10 @@ public class TransportExistsAction extends TransportBroadcastAction<ExistsReques
BytesReference source = request.querySource();
if (source != null && source.length() > 0) {
try {
QueryParseContext.setTypes(request.types());
QueryShardContext.setTypes(request.types());
context.parsedQuery(indexService.queryParserService().parseQuery(source));
} finally {
QueryParseContext.removeTypes();
QueryShardContext.removeTypes();
}
}
context.preProcess();

View File

@@ -28,7 +28,7 @@ import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.query.IndexQueryParserService;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.indices.InvalidAliasNameException;
import java.io.IOException;
@@ -142,10 +142,10 @@ public class AliasValidator extends AbstractComponent {
}
private void validateAliasFilter(XContentParser parser, IndexQueryParserService indexQueryParserService) throws IOException {
QueryParseContext context = indexQueryParserService.getParseContext();
QueryShardContext context = indexQueryParserService.getShardContext();
try {
context.reset(parser);
context.parseInnerFilter();
context.parseContext().parseInnerFilter();
} finally {
context.reset(null);
parser.close();

View File

@@ -33,7 +33,7 @@ import org.elasticsearch.common.lucene.BytesRefs;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.similarity.SimilarityProvider;
import java.io.IOException;
@@ -425,7 +425,7 @@
}
/**
* Should the field query {@link #termQuery(Object, org.elasticsearch.index.query.QueryParseContext)} be used when detecting this
* Should the field query {@link #termQuery(Object, org.elasticsearch.index.query.QueryShardContext)} be used when detecting this
* field in query string.
*/
public boolean useTermQueryWithQueryString() {
@@ -437,11 +437,11 @@
return new Term(names().indexName(), indexedValueForSearch(value));
}
public Query termQuery(Object value, @Nullable QueryParseContext context) {
public Query termQuery(Object value, @Nullable QueryShardContext context) {
return new TermQuery(createTerm(value));
}
public Query termsQuery(List values, @Nullable QueryParseContext context) {
public Query termsQuery(List values, @Nullable QueryShardContext context) {
BytesRef[] bytesRefs = new BytesRef[values.size()];
for (int i = 0; i < bytesRefs.length; i++) {
bytesRefs[i] = indexedValueForSearch(values.get(i));
@@ -460,7 +460,7 @@ public abstract class MappedFieldType extends FieldType {
return new FuzzyQuery(createTerm(value), fuzziness.asDistance(BytesRefs.toString(value)), prefixLength, maxExpansions, transpositions);
}
public Query prefixQuery(String value, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryParseContext context) {
public Query prefixQuery(String value, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryShardContext context) {
PrefixQuery query = new PrefixQuery(createTerm(value));
if (method != null) {
query.setRewriteMethod(method);
@@ -468,7 +468,7 @@ public abstract class MappedFieldType extends FieldType {
return query;
}
public Query regexpQuery(String value, int flags, int maxDeterminizedStates, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryParseContext context) {
public Query regexpQuery(String value, int flags, int maxDeterminizedStates, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryShardContext context) {
RegexpQuery query = new RegexpQuery(createTerm(value), flags, maxDeterminizedStates);
if (method != null) {
query.setRewriteMethod(method);

View File

@@ -40,7 +40,7 @@ import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.MetadataFieldMapper;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.similarity.SimilarityLookupService;
import java.io.IOException;
@@ -186,7 +186,7 @@ public class AllFieldMapper extends MetadataFieldMapper {
}
@Override
public Query termQuery(Object value, QueryParseContext context) {
public Query termQuery(Object value, QueryShardContext context) {
return queryStringTermQuery(createTerm(value));
}
}

View File

@@ -49,7 +49,7 @@ import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.MetadataFieldMapper;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext;
import java.io.IOException;
import java.util.Collection;
@@ -167,7 +167,7 @@ public class IdFieldMapper extends MetadataFieldMapper {
}
@Override
public Query termQuery(Object value, @Nullable QueryParseContext context) {
public Query termQuery(Object value, @Nullable QueryShardContext context) {
if (indexOptions() != IndexOptions.NONE || context == null) {
return super.termQuery(value, context);
}
@@ -176,7 +176,7 @@ public class IdFieldMapper extends MetadataFieldMapper {
}
@Override
public Query termsQuery(List values, @Nullable QueryParseContext context) {
public Query termsQuery(List values, @Nullable QueryShardContext context) {
if (indexOptions() != IndexOptions.NONE || context == null) {
return super.termsQuery(values, context);
}
@@ -184,7 +184,7 @@ public class IdFieldMapper extends MetadataFieldMapper {
}
@Override
public Query prefixQuery(String value, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryParseContext context) {
public Query prefixQuery(String value, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryShardContext context) {
if (indexOptions() != IndexOptions.NONE || context == null) {
return super.prefixQuery(value, method, context);
}
@@ -201,7 +201,7 @@ public class IdFieldMapper extends MetadataFieldMapper {
}
@Override
public Query regexpQuery(String value, int flags, int maxDeterminizedStates, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryParseContext context) {
public Query regexpQuery(String value, int flags, int maxDeterminizedStates, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryShardContext context) {
if (indexOptions() != IndexOptions.NONE || context == null) {
return super.regexpQuery(value, flags, maxDeterminizedStates, method, context);
}

View File

@@ -38,7 +38,7 @@ import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.MetadataFieldMapper;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext;
import java.io.IOException;
import java.util.Iterator;
@@ -157,7 +157,7 @@ public class IndexFieldMapper extends MetadataFieldMapper {
* indices
*/
@Override
public Query termQuery(Object value, @Nullable QueryParseContext context) {
public Query termQuery(Object value, @Nullable QueryShardContext context) {
if (context == null) {
return super.termQuery(value, context);
}
@@ -171,7 +171,7 @@
@Override
public Query termsQuery(List values, QueryParseContext context) {
public Query termsQuery(List values, QueryShardContext context) {
if (context == null) {
return super.termsQuery(values, context);
}

View File

@@ -43,7 +43,7 @@ import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.MetadataFieldMapper;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext;
import java.io.IOException;
import java.util.ArrayList;
@@ -189,12 +189,12 @@ public class ParentFieldMapper extends MetadataFieldMapper {
}
@Override
public Query termQuery(Object value, @Nullable QueryParseContext context) {
public Query termQuery(Object value, @Nullable QueryShardContext context) {
return termsQuery(Collections.singletonList(value), context);
}
@Override
public Query termsQuery(List values, @Nullable QueryParseContext context) {
public Query termsQuery(List values, @Nullable QueryShardContext context) {
if (context == null) {
return super.termsQuery(values, context);
}

View File

@@ -43,7 +43,7 @@ import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.MetadataFieldMapper;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext;
import java.io.IOException;
import java.util.List;
@@ -137,7 +137,7 @@ public class TypeFieldMapper extends MetadataFieldMapper {
}
@Override
public Query termQuery(Object value, @Nullable QueryParseContext context) {
public Query termQuery(Object value, @Nullable QueryShardContext context) {
if (indexOptions() == IndexOptions.NONE) {
return new ConstantScoreQuery(new PrefixQuery(new Term(UidFieldMapper.NAME, Uid.typePrefixAsBytes(BytesRefs.toBytesRef(value)))));
}

View File

@@ -41,7 +41,7 @@ import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.internal.TypeFieldMapper;
import org.elasticsearch.index.percolator.stats.ShardPercolateService;
import org.elasticsearch.index.query.IndexQueryParserService;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.QueryParsingException;
import org.elasticsearch.index.settings.IndexSettings;
import org.elasticsearch.index.shard.AbstractIndexShardComponent;
@@ -184,12 +184,13 @@ public class PercolatorQueriesRegistry extends AbstractIndexShardComponent imple
}
}
//norelease this method parses from xcontent to lucene query, need to re-investigate how to split context here
private Query parseQuery(String type, XContentParser parser) {
String[] previousTypes = null;
if (type != null) {
QueryParseContext.setTypesWithPrevious(new String[]{type});
QueryShardContext.setTypesWithPrevious(new String[]{type});
}
QueryParseContext context = queryParserService.getParseContext();
QueryShardContext context = queryParserService.getShardContext();
try {
context.reset(parser);
// This means that fields in the query need to exist in the mapping prior to registering this query
@@ -208,10 +209,10 @@ public class PercolatorQueriesRegistry extends AbstractIndexShardComponent imple
context.setMapUnmappedFieldAsString(mapUnmappedFieldsAsString ? true : false);
return queryParserService.parseInnerQuery(context);
} catch (IOException e) {
throw new QueryParsingException(context, "Failed to parse", e);
throw new QueryParsingException(context.parseContext(), "Failed to parse", e);
} finally {
if (type != null) {
QueryParseContext.setTypes(previousTypes);
QueryShardContext.setTypes(previousTypes);
}
context.reset(null);
}

View File

@@ -68,20 +68,20 @@ public abstract class AbstractQueryBuilder<QB extends AbstractQueryBuilder> exte
}
@Override
public final Query toQuery(QueryParseContext parseContext) throws IOException {
Query query = doToQuery(parseContext);
public final Query toQuery(QueryShardContext context) throws IOException {
Query query = doToQuery(context);
if (query != null) {
query.setBoost(boost);
if (queryName != null) {
parseContext.addNamedQuery(queryName, query);
context.addNamedQuery(queryName, query);
}
}
return query;
}
//norelease to be made abstract once all query builders override doToQuery providing their own specific implementation.
protected Query doToQuery(QueryParseContext parseContext) throws IOException {
return parseContext.indexQueryParserService().queryParser(getName()).parse(parseContext);
protected Query doToQuery(QueryShardContext context) throws IOException {
return context.indexQueryParserService().queryParser(getName()).parse(context);
}
@Override
@@ -219,17 +219,17 @@ public abstract class AbstractQueryBuilder<QB extends AbstractQueryBuilder> exte
/**
* Helper method to convert collection of {@link QueryBuilder} instances to lucene
* {@link Query} instances. {@link QueryBuilder} that return <tt>null</tt> calling
* their {@link QueryBuilder#toQuery(QueryParseContext)} method are not added to the
* their {@link QueryBuilder#toQuery(QueryShardContext)} method are not added to the
* resulting collection.
*
* @throws IOException
* @throws QueryParsingException
* @throws QueryShardException
*/
protected static Collection<Query> toQueries(Collection<QueryBuilder> queryBuilders, QueryParseContext parseContext) throws QueryParsingException,
protected static Collection<Query> toQueries(Collection<QueryBuilder> queryBuilders, QueryShardContext context) throws QueryShardException,
IOException {
List<Query> queries = new ArrayList<>(queryBuilders.size());
for (QueryBuilder queryBuilder : queryBuilders) {
Query query = queryBuilder.toQuery(parseContext);
Query query = queryBuilder.toQuery(context);
if (query != null) {
queries.add(query);
}

View File

@@ -84,7 +84,7 @@ public class AndQueryBuilder extends AbstractQueryBuilder<AndQueryBuilder> {
}
@Override
protected Query doToQuery(QueryParseContext parseContext) throws IOException {
protected Query doToQuery(QueryShardContext context) throws IOException {
if (filters.isEmpty()) {
// no filters provided, this should be ignored upstream
return null;
@@ -92,7 +92,7 @@ public class AndQueryBuilder extends AbstractQueryBuilder<AndQueryBuilder> {
BooleanQuery query = new BooleanQuery();
for (QueryBuilder f : filters) {
Query innerQuery = f.toQuery(parseContext);
Query innerQuery = f.toQuery(context);
// ignore queries that are null
if (innerQuery != null) {
query.add(innerQuery, Occur.MUST);

View File

@@ -26,14 +26,14 @@ import java.io.IOException;
/**
* Class used during the query parsers refactoring. Will be removed once we can parse search requests on the coordinating node.
* All query parsers that have a refactored "fromXContent" method can be changed to extend this instead of {@link BaseQueryParserTemp}.
* Keeps old {@link QueryParser#parse(QueryParseContext)} method as a stub delegating to
* {@link QueryParser#fromXContent(QueryParseContext)} and {@link QueryBuilder#toQuery(QueryParseContext)}}
* Keeps old {@link QueryParser#parse(QueryShardContext)} method as a stub delegating to
* {@link QueryParser#fromXContent(QueryShardContext)} and {@link QueryBuilder#toQuery(QueryShardContext)}}
*/
//norelease needs to be removed once we parse search requests on the coordinating node, as the parse method is not needed anymore at that point.
public abstract class BaseQueryParser implements QueryParser {
@Override
public final Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
return fromXContent(parseContext).toQuery(parseContext);
public final Query parse(QueryShardContext context) throws IOException, QueryParsingException {
return fromXContent(context.parseContext()).toQuery(context);
}
}

View File

@@ -33,7 +33,7 @@ public abstract class BaseQueryParserTemp implements QueryParser {
@Override
public QueryBuilder fromXContent(QueryParseContext parseContext) throws IOException, QueryParsingException {
Query query = parse(parseContext);
Query query = parse(parseContext.shardContext());
return new QueryWrappingQueryBuilder(query);
}
}

View File

@@ -254,12 +254,12 @@ public class BoolQueryBuilder extends AbstractQueryBuilder<BoolQueryBuilder> {
}
@Override
protected Query doToQuery(QueryParseContext parseContext) throws IOException {
protected Query doToQuery(QueryShardContext context) throws IOException {
BooleanQuery booleanQuery = new BooleanQuery(disableCoord);
addBooleanClauses(parseContext, booleanQuery, mustClauses, BooleanClause.Occur.MUST);
addBooleanClauses(parseContext, booleanQuery, mustNotClauses, BooleanClause.Occur.MUST_NOT);
addBooleanClauses(parseContext, booleanQuery, shouldClauses, BooleanClause.Occur.SHOULD);
addBooleanClauses(parseContext, booleanQuery, filterClauses, BooleanClause.Occur.FILTER);
addBooleanClauses(context, booleanQuery, mustClauses, BooleanClause.Occur.MUST);
addBooleanClauses(context, booleanQuery, mustNotClauses, BooleanClause.Occur.MUST_NOT);
addBooleanClauses(context, booleanQuery, shouldClauses, BooleanClause.Occur.SHOULD);
addBooleanClauses(context, booleanQuery, filterClauses, BooleanClause.Occur.FILTER);
if (booleanQuery.clauses().isEmpty()) {
return new MatchAllDocsQuery();
@@ -279,9 +279,9 @@ public class BoolQueryBuilder extends AbstractQueryBuilder<BoolQueryBuilder> {
return validationException;
}
private static void addBooleanClauses(QueryParseContext parseContext, BooleanQuery booleanQuery, List<QueryBuilder> clauses, Occur occurs) throws IOException {
private static void addBooleanClauses(QueryShardContext context, BooleanQuery booleanQuery, List<QueryBuilder> clauses, Occur occurs) throws IOException {
for (QueryBuilder query : clauses) {
Query luceneQuery = query.toQuery(parseContext);
Query luceneQuery = query.toQuery(context);
if (luceneQuery != null) {
booleanQuery.add(new BooleanClause(luceneQuery, occurs));
}

View File

@@ -129,9 +129,9 @@ public class BoostingQueryBuilder extends AbstractQueryBuilder<BoostingQueryBuil
}
@Override
protected Query doToQuery(QueryParseContext parseContext) throws IOException {
Query positive = positiveQuery.toQuery(parseContext);
Query negative = negativeQuery.toQuery(parseContext);
protected Query doToQuery(QueryShardContext context) throws IOException {
Query positive = positiveQuery.toQuery(context);
Query negative = negativeQuery.toQuery(context);
// make upstream queries ignore this query by returning `null`
// if either inner query builder returns null
if (positive == null || negative == null) {

View File

@@ -228,9 +228,9 @@ public class CommonTermsQueryBuilder extends AbstractQueryBuilder<CommonTermsQue
}
@Override
protected Query doToQuery(QueryParseContext parseContext) throws IOException {
protected Query doToQuery(QueryShardContext context) throws IOException {
String field;
MappedFieldType fieldType = parseContext.fieldMapper(fieldName);
MappedFieldType fieldType = context.fieldMapper(fieldName);
if (fieldType != null) {
field = fieldType.names().indexName();
} else {
@@ -240,14 +240,14 @@ public class CommonTermsQueryBuilder extends AbstractQueryBuilder<CommonTermsQue
Analyzer analyzerObj;
if (analyzer == null) {
if (fieldType != null) {
analyzerObj = parseContext.getSearchAnalyzer(fieldType);
analyzerObj = context.getSearchAnalyzer(fieldType);
} else {
analyzerObj = parseContext.mapperService().searchAnalyzer();
analyzerObj = context.mapperService().searchAnalyzer();
}
} else {
analyzerObj = parseContext.mapperService().analysisService().analyzer(analyzer);
analyzerObj = context.mapperService().analysisService().analyzer(analyzer);
if (analyzerObj == null) {
throw new IllegalArgumentException("no analyzer found for [" + analyzer + "]");
throw new QueryShardException(context, "[common] analyzer [" + analyzer + "] not found");
}
}

View File

@@ -87,9 +87,6 @@ public class CommonTermsQueryParser extends BaseQueryParser {
text = parser.objectText();
} else if ("analyzer".equals(currentFieldName)) {
analyzer = parser.text();
if (parseContext.analysisService().analyzer(analyzer) == null) {
throw new QueryParsingException(parseContext, "[common] analyzer [" + parser.text() + "] not found");
}
} else if ("disable_coord".equals(currentFieldName) || "disableCoord".equals(currentFieldName)) {
disableCoord = parser.booleanValue();
} else if ("boost".equals(currentFieldName)) {

View File

@@ -67,13 +67,13 @@ public class ConstantScoreQueryBuilder extends AbstractQueryBuilder<ConstantScor
}
@Override
protected Query doToQuery(QueryParseContext parseContext) throws IOException {
Query innerFilter = filterBuilder.toQuery(parseContext);
protected Query doToQuery(QueryShardContext context) throws IOException {
Query innerFilter = filterBuilder.toQuery(context);
if (innerFilter == null ) {
// return null so that parent queries (e.g. bool) also ignore this
return null;
}
return new ConstantScoreQuery(filterBuilder.toQuery(parseContext));
return new ConstantScoreQuery(filterBuilder.toQuery(context));
}
@Override

View File

@@ -96,9 +96,9 @@ public class DisMaxQueryBuilder extends AbstractQueryBuilder<DisMaxQueryBuilder>
}
@Override
protected Query doToQuery(QueryParseContext parseContext) throws IOException {
protected Query doToQuery(QueryShardContext context) throws IOException {
// return null if there are no queries at all
Collection<Query> luceneQueries = toQueries(queries, parseContext);
Collection<Query> luceneQueries = toQueries(queries, context);
if (luceneQueries.isEmpty()) {
return null;
}

View File

@ -62,7 +62,7 @@ public class EmptyQueryBuilder extends ToXContentToBytes implements QueryBuilder
}
@Override
public Query toQuery(QueryParseContext parseContext) throws IOException {
public Query toQuery(QueryShardContext context) throws IOException {
// empty
return null;
}

View File

@ -63,8 +63,8 @@ public class ExistsQueryBuilder extends AbstractQueryBuilder<ExistsQueryBuilder>
}
@Override
protected Query doToQuery(QueryParseContext parseContext) throws IOException {
return newFilter(parseContext, name);
protected Query doToQuery(QueryShardContext context) throws IOException {
return newFilter(context, name);
}
@Override
@ -73,20 +73,20 @@ public class ExistsQueryBuilder extends AbstractQueryBuilder<ExistsQueryBuilder>
return null;
}
public static Query newFilter(QueryParseContext parseContext, String fieldPattern) {
final FieldNamesFieldMapper.FieldNamesFieldType fieldNamesFieldType = (FieldNamesFieldMapper.FieldNamesFieldType)parseContext.mapperService().fullName(FieldNamesFieldMapper.NAME);
public static Query newFilter(QueryShardContext context, String fieldPattern) {
final FieldNamesFieldMapper.FieldNamesFieldType fieldNamesFieldType = (FieldNamesFieldMapper.FieldNamesFieldType)context.mapperService().fullName(FieldNamesFieldMapper.NAME);
if (fieldNamesFieldType == null) {
// can only happen when no types exist, so no docs exist either
return Queries.newMatchNoDocsQuery();
}
ObjectMapper objectMapper = parseContext.getObjectMapper(fieldPattern);
ObjectMapper objectMapper = context.getObjectMapper(fieldPattern);
if (objectMapper != null) {
// automatic make the object mapper pattern
fieldPattern = fieldPattern + ".*";
}
Collection<String> fields = parseContext.simpleMatchToIndexNames(fieldPattern);
Collection<String> fields = context.simpleMatchToIndexNames(fieldPattern);
if (fields.isEmpty()) {
// no fields exists, so we should not match anything
return Queries.newMatchNoDocsQuery();
@ -94,7 +94,7 @@ public class ExistsQueryBuilder extends AbstractQueryBuilder<ExistsQueryBuilder>
BooleanQuery boolFilter = new BooleanQuery();
for (String field : fields) {
MappedFieldType fieldType = parseContext.fieldMapper(field);
MappedFieldType fieldType = context.fieldMapper(field);
Query filter = null;
if (fieldNamesFieldType.isEnabled()) {
final String f;
@ -103,7 +103,7 @@ public class ExistsQueryBuilder extends AbstractQueryBuilder<ExistsQueryBuilder>
} else {
f = field;
}
filter = fieldNamesFieldType.termQuery(f, parseContext);
filter = fieldNamesFieldType.termQuery(f, context);
}
// if _field_names are not indexed, we need to go the slow way
if (filter == null && fieldType != null) {

View File

@ -69,9 +69,9 @@ public class FQueryFilterBuilder extends AbstractQueryBuilder<FQueryFilterBuilde
}
@Override
protected Query doToQuery(QueryParseContext parseContext) throws IOException {
protected Query doToQuery(QueryShardContext context) throws IOException {
// inner query builder can potentially be `null`, in that case we ignore it
Query innerQuery = this.queryBuilder.toQuery(parseContext);
Query innerQuery = this.queryBuilder.toQuery(context);
if (innerQuery == null) {
return null;
}

View File

@ -76,13 +76,13 @@ public class FieldMaskingSpanQueryBuilder extends AbstractQueryBuilder<FieldMask
}
@Override
protected SpanQuery doToQuery(QueryParseContext parseContext) throws IOException {
protected SpanQuery doToQuery(QueryShardContext context) throws IOException {
String fieldInQuery = fieldName;
MappedFieldType fieldType = parseContext.fieldMapper(fieldName);
MappedFieldType fieldType = context.fieldMapper(fieldName);
if (fieldType != null) {
fieldInQuery = fieldType.names().indexName();
}
Query innerQuery = queryBuilder.toQuery(parseContext);
Query innerQuery = queryBuilder.toQuery(context);
assert innerQuery instanceof SpanQuery;
return new FieldMaskingSpanQuery((SpanQuery)innerQuery, fieldInQuery);
}

View File

@ -95,9 +95,9 @@ public class FilteredQueryBuilder extends AbstractQueryBuilder<FilteredQueryBuil
}
@Override
public Query doToQuery(QueryParseContext parseContext) throws QueryParsingException, IOException {
Query query = queryBuilder.toQuery(parseContext);
Query filter = filterBuilder.toQuery(parseContext);
public Query doToQuery(QueryShardContext context) throws QueryShardException, IOException {
Query query = queryBuilder.toQuery(context);
Query filter = filterBuilder.toQuery(context);
if (query == null) {
// Most likely this query was generated from the JSON query DSL - it parsed to an EmptyQueryBuilder so we ignore

View File

@ -52,7 +52,8 @@ public class FuzzyQueryParser extends BaseQueryParserTemp {
}
@Override
public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
public Query parse(QueryShardContext context) throws IOException, QueryParsingException {
QueryParseContext parseContext = context.parseContext();
XContentParser parser = parseContext.parser();
XContentParser.Token token = parser.nextToken();
@ -114,7 +115,7 @@ public class FuzzyQueryParser extends BaseQueryParserTemp {
}
Query query = null;
MappedFieldType fieldType = parseContext.fieldMapper(fieldName);
MappedFieldType fieldType = context.fieldMapper(fieldName);
if (fieldType != null) {
query = fieldType.fuzzyQuery(value, fuzziness, prefixLength, maxExpansions, transpositions);
}
@ -128,7 +129,7 @@ public class FuzzyQueryParser extends BaseQueryParserTemp {
query.setBoost(boost);
if (queryName != null) {
parseContext.addNamedQuery(queryName, query);
context.addNamedQuery(queryName, query);
}
return query;
}

View File

@ -67,7 +67,8 @@ public class GeoBoundingBoxQueryParser extends BaseQueryParserTemp {
}
@Override
public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
public Query parse(QueryShardContext context) throws IOException, QueryParsingException {
QueryParseContext parseContext = context.parseContext();
XContentParser parser = parseContext.parser();
String fieldName = null;
@ -164,7 +165,7 @@ public class GeoBoundingBoxQueryParser extends BaseQueryParserTemp {
}
}
MappedFieldType fieldType = parseContext.fieldMapper(fieldName);
MappedFieldType fieldType = context.fieldMapper(fieldName);
if (fieldType == null) {
throw new QueryParsingException(parseContext, "failed to parse [{}] query. could not find [{}] field [{}]", NAME, GeoPointFieldMapper.CONTENT_TYPE, fieldName);
}
@ -177,7 +178,7 @@ public class GeoBoundingBoxQueryParser extends BaseQueryParserTemp {
if ("indexed".equals(type)) {
filter = IndexedGeoBoundingBoxQuery.create(topLeft, bottomRight, geoFieldType);
} else if ("memory".equals(type)) {
IndexGeoPointFieldData indexFieldData = parseContext.getForField(fieldType);
IndexGeoPointFieldData indexFieldData = context.getForField(fieldType);
filter = new InMemoryGeoBoundingBoxQuery(topLeft, bottomRight, indexFieldData);
} else {
throw new QueryParsingException(parseContext, "failed to parse [{}] query. geo bounding box type [{}] is not supported. either [indexed] or [memory] are allowed", NAME, type);
@ -186,7 +187,7 @@ public class GeoBoundingBoxQueryParser extends BaseQueryParserTemp {
filter.setBoost(boost);
}
if (queryName != null) {
parseContext.addNamedQuery(queryName, filter);
context.addNamedQuery(queryName, filter);
}
return filter;
}

View File

@ -54,7 +54,8 @@ public class GeoDistanceQueryParser extends BaseQueryParserTemp {
}
@Override
public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
public Query parse(QueryShardContext context) throws IOException, QueryParsingException {
QueryParseContext parseContext = context.parseContext();
XContentParser parser = parseContext.parser();
XContentParser.Token token;
@ -148,7 +149,7 @@ public class GeoDistanceQueryParser extends BaseQueryParserTemp {
GeoUtils.normalizePoint(point, normalizeLat, normalizeLon);
}
MappedFieldType fieldType = parseContext.fieldMapper(fieldName);
MappedFieldType fieldType = context.fieldMapper(fieldName);
if (fieldType == null) {
throw new QueryParsingException(parseContext, "failed to find geo_point field [" + fieldName + "]");
}
@ -158,10 +159,10 @@ public class GeoDistanceQueryParser extends BaseQueryParserTemp {
GeoPointFieldMapper.GeoPointFieldType geoFieldType = ((GeoPointFieldMapper.GeoPointFieldType) fieldType);
IndexGeoPointFieldData indexFieldData = parseContext.getForField(fieldType);
IndexGeoPointFieldData indexFieldData = context.getForField(fieldType);
Query query = new GeoDistanceRangeQuery(point, null, distance, true, false, geoDistance, geoFieldType, indexFieldData, optimizeBbox);
if (queryName != null) {
parseContext.addNamedQuery(queryName, query);
context.addNamedQuery(queryName, query);
}
query.setBoost(boost);
return query;

View File

@ -54,7 +54,8 @@ public class GeoDistanceRangeQueryParser extends BaseQueryParserTemp {
}
@Override
public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
public Query parse(QueryShardContext context) throws IOException, QueryParsingException {
QueryParseContext parseContext = context.parseContext();
XContentParser parser = parseContext.parser();
XContentParser.Token token;
@ -188,7 +189,7 @@ public class GeoDistanceRangeQueryParser extends BaseQueryParserTemp {
GeoUtils.normalizePoint(point, normalizeLat, normalizeLon);
}
MappedFieldType fieldType = parseContext.fieldMapper(fieldName);
MappedFieldType fieldType = context.fieldMapper(fieldName);
if (fieldType == null) {
throw new QueryParsingException(parseContext, "failed to find geo_point field [" + fieldName + "]");
}
@ -197,10 +198,10 @@ public class GeoDistanceRangeQueryParser extends BaseQueryParserTemp {
}
GeoPointFieldMapper.GeoPointFieldType geoFieldType = ((GeoPointFieldMapper.GeoPointFieldType) fieldType);
IndexGeoPointFieldData indexFieldData = parseContext.getForField(fieldType);
IndexGeoPointFieldData indexFieldData = context.getForField(fieldType);
Query query = new GeoDistanceRangeQuery(point, from, to, includeLower, includeUpper, geoDistance, geoFieldType, indexFieldData, optimizeBbox);
if (queryName != null) {
parseContext.addNamedQuery(queryName, query);
context.addNamedQuery(queryName, query);
}
query.setBoost(boost);
return query;

View File

@ -60,7 +60,8 @@ public class GeoPolygonQueryParser extends BaseQueryParserTemp {
}
@Override
public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
public Query parse(QueryShardContext context) throws IOException, QueryParsingException {
QueryParseContext parseContext = context.parseContext();
XContentParser parser = parseContext.parser();
String fieldName = null;
@ -140,7 +141,7 @@ public class GeoPolygonQueryParser extends BaseQueryParserTemp {
}
}
MappedFieldType fieldType = parseContext.fieldMapper(fieldName);
MappedFieldType fieldType = context.fieldMapper(fieldName);
if (fieldType == null) {
throw new QueryParsingException(parseContext, "failed to find geo_point field [" + fieldName + "]");
}
@ -148,10 +149,10 @@ public class GeoPolygonQueryParser extends BaseQueryParserTemp {
throw new QueryParsingException(parseContext, "field [" + fieldName + "] is not a geo_point field");
}
IndexGeoPointFieldData indexFieldData = parseContext.getForField(fieldType);
IndexGeoPointFieldData indexFieldData = context.getForField(fieldType);
Query query = new GeoPolygonQuery(indexFieldData, shell.toArray(new GeoPoint[shell.size()]));
if (queryName != null) {
parseContext.addNamedQuery(queryName, query);
context.addNamedQuery(queryName, query);
}
query.setBoost(boost);
return query;

View File

@ -53,7 +53,8 @@ public class GeoShapeQueryParser extends BaseQueryParserTemp {
}
@Override
public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
public Query parse(QueryShardContext context) throws IOException, QueryParsingException {
QueryParseContext parseContext = context.parseContext();
XContentParser parser = parseContext.parser();
String fieldName = null;
@ -136,7 +137,7 @@ public class GeoShapeQueryParser extends BaseQueryParserTemp {
throw new QueryParsingException(parseContext, "No Shape Relation defined");
}
MappedFieldType fieldType = parseContext.fieldMapper(fieldName);
MappedFieldType fieldType = context.fieldMapper(fieldName);
if (fieldType == null) {
throw new QueryParsingException(parseContext, "Failed to find geo_shape field [" + fieldName + "]");
}
@ -157,7 +158,7 @@ public class GeoShapeQueryParser extends BaseQueryParserTemp {
// this strategy doesn't support disjoint anymore: but it did before, including creating lucene fieldcache (!)
// in this case, execute disjoint as exists && !intersects
BooleanQuery bool = new BooleanQuery();
Query exists = ExistsQueryBuilder.newFilter(parseContext, fieldName);
Query exists = ExistsQueryBuilder.newFilter(context, fieldName);
Filter intersects = strategy.makeFilter(getArgs(shape, ShapeRelation.INTERSECTS));
bool.add(exists, BooleanClause.Occur.MUST);
bool.add(intersects, BooleanClause.Occur.MUST_NOT);
@ -167,7 +168,7 @@ public class GeoShapeQueryParser extends BaseQueryParserTemp {
}
query.setBoost(boost);
if (queryName != null) {
parseContext.addNamedQuery(queryName, query);
context.addNamedQuery(queryName, query);
}
return query;
}

View File

@ -69,7 +69,7 @@ public class GeohashCellQuery {
* @param geohashes optional array of additional geohashes
* @return a new GeoBoundinboxfilter
*/
public static Query create(QueryParseContext context, GeoPointFieldMapper.GeoPointFieldType fieldType, String geohash, @Nullable List<CharSequence> geohashes) {
public static Query create(QueryShardContext context, GeoPointFieldMapper.GeoPointFieldType fieldType, String geohash, @Nullable List<CharSequence> geohashes) {
MappedFieldType geoHashMapper = fieldType.geohashFieldType();
if (geoHashMapper == null) {
throw new IllegalArgumentException("geohash filter needs geohash_prefix to be enabled");
@ -186,7 +186,8 @@ public class GeohashCellQuery {
}
@Override
public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
public Query parse(QueryShardContext context) throws IOException, QueryParsingException {
QueryParseContext parseContext = context.parseContext();
XContentParser parser = parseContext.parser();
String fieldName = null;
@ -248,7 +249,7 @@ public class GeohashCellQuery {
throw new QueryParsingException(parseContext, "failed to parse [{}] query. missing geohash value", NAME);
}
MappedFieldType fieldType = parseContext.fieldMapper(fieldName);
MappedFieldType fieldType = context.fieldMapper(fieldName);
if (fieldType == null) {
throw new QueryParsingException(parseContext, "failed to parse [{}] query. missing [{}] field [{}]", NAME, GeoPointFieldMapper.CONTENT_TYPE, fieldName);
}
@ -269,12 +270,12 @@ public class GeohashCellQuery {
Query filter;
if (neighbors) {
filter = create(parseContext, geoFieldType, geohash, GeoHashUtils.addNeighbors(geohash, new ArrayList<CharSequence>(8)));
filter = create(context, geoFieldType, geohash, GeoHashUtils.addNeighbors(geohash, new ArrayList<CharSequence>(8)));
} else {
filter = create(parseContext, geoFieldType, geohash, null);
filter = create(context, geoFieldType, geohash, null);
}
if (queryName != null) {
parseContext.addNamedQuery(queryName, filter);
context.addNamedQuery(queryName, filter);
}
if (filter != null) {
filter.setBoost(boost);

View File

@ -22,9 +22,6 @@ package org.elasticsearch.index.query;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.MultiDocValues;
import org.apache.lucene.search.*;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.QueryWrapperFilter;
import org.apache.lucene.search.join.BitDocIdSetFilter;
import org.elasticsearch.common.ParseField;
import org.apache.lucene.search.join.JoinUtil;
@ -70,7 +67,8 @@ public class HasChildQueryParser extends BaseQueryParserTemp {
}
@Override
public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
public Query parse(QueryShardContext context) throws IOException, QueryParsingException {
QueryParseContext parseContext = context.parseContext();
XContentParser parser = parseContext.parser();
boolean queryFound = false;
@ -140,7 +138,7 @@ public class HasChildQueryParser extends BaseQueryParserTemp {
}
innerQuery.setBoost(boost);
DocumentMapper childDocMapper = parseContext.mapperService().documentMapper(childType);
DocumentMapper childDocMapper = context.mapperService().documentMapper(childType);
if (childDocMapper == null) {
throw new QueryParsingException(parseContext, "[has_child] No mapping for for type [" + childType + "]");
}
@ -150,14 +148,14 @@ public class HasChildQueryParser extends BaseQueryParserTemp {
}
if (innerHits != null) {
ParsedQuery parsedQuery = new ParsedQuery(innerQuery, parseContext.copyNamedQueries());
InnerHitsContext.ParentChildInnerHits parentChildInnerHits = new InnerHitsContext.ParentChildInnerHits(innerHits.v2(), parsedQuery, null, parseContext.mapperService(), childDocMapper);
ParsedQuery parsedQuery = new ParsedQuery(innerQuery, context.copyNamedQueries());
InnerHitsContext.ParentChildInnerHits parentChildInnerHits = new InnerHitsContext.ParentChildInnerHits(innerHits.v2(), parsedQuery, null, context.mapperService(), childDocMapper);
String name = innerHits.v1() != null ? innerHits.v1() : childType;
parseContext.addInnerHits(name, parentChildInnerHits);
context.addInnerHits(name, parentChildInnerHits);
}
String parentType = parentFieldMapper.type();
DocumentMapper parentDocMapper = parseContext.mapperService().documentMapper(parentType);
DocumentMapper parentDocMapper = context.mapperService().documentMapper(parentType);
if (parentDocMapper == null) {
throw new QueryParsingException(parseContext, "[has_child] Type [" + childType + "] points to a non existent parent type ["
+ parentType + "]");
@ -169,15 +167,15 @@ public class HasChildQueryParser extends BaseQueryParserTemp {
BitDocIdSetFilter nonNestedDocsFilter = null;
if (parentDocMapper.hasNestedObjects()) {
nonNestedDocsFilter = parseContext.bitsetFilter(Queries.newNonNestedFilter());
nonNestedDocsFilter = context.bitsetFilter(Queries.newNonNestedFilter());
}
// wrap the query with type query
innerQuery = Queries.filtered(innerQuery, childDocMapper.typeFilter());
final Query query;
final ParentChildIndexFieldData parentChildIndexFieldData = parseContext.getForField(parentFieldMapper.fieldType());
if (parseContext.indexVersionCreated().onOrAfter(Version.V_2_0_0_beta1)) {
final ParentChildIndexFieldData parentChildIndexFieldData = context.getForField(parentFieldMapper.fieldType());
if (context.indexVersionCreated().onOrAfter(Version.V_2_0_0_beta1)) {
query = joinUtilHelper(parentType, parentChildIndexFieldData, parentDocMapper.typeFilter(), scoreType, innerQuery, minChildren, maxChildren);
} else {
// TODO: use the query API
@ -191,7 +189,7 @@ public class HasChildQueryParser extends BaseQueryParserTemp {
}
}
if (queryName != null) {
parseContext.addNamedQuery(queryName, query);
context.addNamedQuery(queryName, query);
}
query.setBoost(boost);
return query;

View File

@ -64,7 +64,8 @@ public class HasParentQueryParser extends BaseQueryParserTemp {
}
@Override
public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
public Query parse(QueryShardContext context) throws IOException, QueryParsingException {
QueryParseContext parseContext = context.parseContext();
XContentParser parser = parseContext.parser();
boolean queryFound = false;
@ -133,40 +134,40 @@ public class HasParentQueryParser extends BaseQueryParserTemp {
}
innerQuery.setBoost(boost);
Query query = createParentQuery(innerQuery, parentType, score, parseContext, innerHits);
Query query = createParentQuery(innerQuery, parentType, score, context, innerHits);
if (query == null) {
return null;
}
query.setBoost(boost);
if (queryName != null) {
parseContext.addNamedQuery(queryName, query);
context.addNamedQuery(queryName, query);
}
return query;
}
static Query createParentQuery(Query innerQuery, String parentType, boolean score, QueryParseContext parseContext, Tuple<String, SubSearchContext> innerHits) throws IOException {
DocumentMapper parentDocMapper = parseContext.mapperService().documentMapper(parentType);
static Query createParentQuery(Query innerQuery, String parentType, boolean score, QueryShardContext context, Tuple<String, SubSearchContext> innerHits) throws IOException {
DocumentMapper parentDocMapper = context.mapperService().documentMapper(parentType);
if (parentDocMapper == null) {
throw new QueryParsingException(parseContext, "[has_parent] query configured 'parent_type' [" + parentType
throw new QueryParsingException(context.parseContext(), "[has_parent] query configured 'parent_type' [" + parentType
+ "] is not a valid type");
}
if (innerHits != null) {
ParsedQuery parsedQuery = new ParsedQuery(innerQuery, parseContext.copyNamedQueries());
InnerHitsContext.ParentChildInnerHits parentChildInnerHits = new InnerHitsContext.ParentChildInnerHits(innerHits.v2(), parsedQuery, null, parseContext.mapperService(), parentDocMapper);
ParsedQuery parsedQuery = new ParsedQuery(innerQuery, context.copyNamedQueries());
InnerHitsContext.ParentChildInnerHits parentChildInnerHits = new InnerHitsContext.ParentChildInnerHits(innerHits.v2(), parsedQuery, null, context.mapperService(), parentDocMapper);
String name = innerHits.v1() != null ? innerHits.v1() : parentType;
parseContext.addInnerHits(name, parentChildInnerHits);
context.addInnerHits(name, parentChildInnerHits);
}
Set<String> parentTypes = new HashSet<>(5);
parentTypes.add(parentDocMapper.type());
ParentChildIndexFieldData parentChildIndexFieldData = null;
for (DocumentMapper documentMapper : parseContext.mapperService().docMappers(false)) {
for (DocumentMapper documentMapper : context.mapperService().docMappers(false)) {
ParentFieldMapper parentFieldMapper = documentMapper.parentFieldMapper();
if (parentFieldMapper.active()) {
DocumentMapper parentTypeDocumentMapper = parseContext.mapperService().documentMapper(parentFieldMapper.type());
parentChildIndexFieldData = parseContext.getForField(parentFieldMapper.fieldType());
DocumentMapper parentTypeDocumentMapper = context.mapperService().documentMapper(parentFieldMapper.type());
parentChildIndexFieldData = context.getForField(parentFieldMapper.fieldType());
if (parentTypeDocumentMapper == null) {
// Only add this, if this parentFieldMapper (also a parent) isn't a child of another parent.
parentTypes.add(parentFieldMapper.type());
@ -174,19 +175,19 @@ public class HasParentQueryParser extends BaseQueryParserTemp {
}
}
if (parentChildIndexFieldData == null) {
throw new QueryParsingException(parseContext, "[has_parent] no _parent field configured");
throw new QueryParsingException(context.parseContext(), "[has_parent] no _parent field configured");
}
Query parentFilter = null;
if (parentTypes.size() == 1) {
DocumentMapper documentMapper = parseContext.mapperService().documentMapper(parentTypes.iterator().next());
DocumentMapper documentMapper = context.mapperService().documentMapper(parentTypes.iterator().next());
if (documentMapper != null) {
parentFilter = documentMapper.typeFilter();
}
} else {
BooleanQuery parentsFilter = new BooleanQuery();
for (String parentTypeStr : parentTypes) {
DocumentMapper documentMapper = parseContext.mapperService().documentMapper(parentTypeStr);
DocumentMapper documentMapper = context.mapperService().documentMapper(parentTypeStr);
if (documentMapper != null) {
parentsFilter.add(documentMapper.typeFilter(), BooleanClause.Occur.SHOULD);
}
@ -201,7 +202,7 @@ public class HasParentQueryParser extends BaseQueryParserTemp {
// wrap the query with type query
innerQuery = Queries.filtered(innerQuery, parentDocMapper.typeFilter());
Filter childrenFilter = new QueryWrapperFilter(Queries.not(parentFilter));
if (parseContext.indexVersionCreated().onOrAfter(Version.V_2_0_0_beta1)) {
if (context.indexVersionCreated().onOrAfter(Version.V_2_0_0_beta1)) {
ScoreType scoreMode = score ? ScoreType.MAX : ScoreType.NONE;
return joinUtilHelper(parentType, parentChildIndexFieldData, childrenFilter, scoreMode, innerQuery, 0, Integer.MAX_VALUE);
} else {

View File

@ -124,16 +124,16 @@ public class IdsQueryBuilder extends AbstractQueryBuilder<IdsQueryBuilder> {
}
@Override
protected Query doToQuery(QueryParseContext parseContext) throws IOException {
protected Query doToQuery(QueryShardContext context) throws IOException {
Query query;
if (this.ids.isEmpty()) {
query = Queries.newMatchNoDocsQuery();
} else {
Collection<String> typesForQuery;
if (types == null || types.length == 0) {
typesForQuery = parseContext.queryTypes();
typesForQuery = context.queryTypes();
} else if (types.length == 1 && MetaData.ALL.equals(types[0])) {
typesForQuery = parseContext.mapperService().types();
typesForQuery = context.mapperService().types();
} else {
typesForQuery = Sets.newHashSet(types);
}

View File

@ -53,10 +53,10 @@ public class IndexQueryParserService extends AbstractIndexComponent {
public static final String PARSE_STRICT = "index.query.parse.strict";
public static final String ALLOW_UNMAPPED = "index.query.parse.allow_unmapped_fields";
private CloseableThreadLocal<QueryParseContext> cache = new CloseableThreadLocal<QueryParseContext>() {
private CloseableThreadLocal<QueryShardContext> cache = new CloseableThreadLocal<QueryShardContext>() {
@Override
protected QueryParseContext initialValue() {
return new QueryParseContext(index, IndexQueryParserService.this);
protected QueryShardContext initialValue() {
return new QueryShardContext(index, IndexQueryParserService.this);
}
};
@ -120,16 +120,20 @@ public class IndexQueryParserService extends AbstractIndexComponent {
return indicesQueriesRegistry.queryParsers().get(name);
}
public IndicesQueriesRegistry indicesQueriesRegistry() {
return indicesQueriesRegistry;
}
public ParsedQuery parse(QueryBuilder queryBuilder) {
XContentParser parser = null;
try {
BytesReference bytes = queryBuilder.buildAsBytes();
parser = XContentFactory.xContent(bytes).createParser(bytes);
return parse(cache.get(), parser);
} catch (QueryParsingException e) {
} catch (QueryShardException e) {
throw e;
} catch (Exception e) {
throw new QueryParsingException(getParseContext(), "Failed to parse", e);
throw new QueryParsingException(getShardContext().parseContext(), "Failed to parse", e);
} finally {
if (parser != null) {
parser.close();
@ -146,10 +150,10 @@ public class IndexQueryParserService extends AbstractIndexComponent {
try {
parser = XContentFactory.xContent(source, offset, length).createParser(source, offset, length);
return parse(cache.get(), parser);
} catch (QueryParsingException e) {
} catch (QueryShardException e) {
throw e;
} catch (Exception e) {
throw new QueryParsingException(getParseContext(), "Failed to parse", e);
throw new QueryParsingException(getShardContext().parseContext(), "Failed to parse", e);
} finally {
if (parser != null) {
parser.close();
@ -161,7 +165,8 @@ public class IndexQueryParserService extends AbstractIndexComponent {
return parse(cache.get(), source);
}
public ParsedQuery parse(QueryParseContext context, BytesReference source) {
//norelease
public ParsedQuery parse(QueryShardContext context, BytesReference source) {
XContentParser parser = null;
try {
parser = XContentFactory.xContent(source).createParser(source);
@ -169,7 +174,7 @@ public class IndexQueryParserService extends AbstractIndexComponent {
} catch (QueryParsingException e) {
throw e;
} catch (Exception e) {
throw new QueryParsingException(context, "Failed to parse", e);
throw new QueryParsingException(context.parseContext(), "Failed to parse", e);
} finally {
if (parser != null) {
parser.close();
@ -177,15 +182,15 @@ public class IndexQueryParserService extends AbstractIndexComponent {
}
}
public ParsedQuery parse(String source) throws QueryParsingException {
public ParsedQuery parse(String source) throws QueryParsingException, QueryShardException {
XContentParser parser = null;
try {
parser = XContentFactory.xContent(source).createParser(source);
return innerParse(cache.get(), parser);
} catch (QueryParsingException e) {
} catch (QueryShardException|QueryParsingException e) {
throw e;
} catch (Exception e) {
throw new QueryParsingException(getParseContext(), "Failed to parse [" + source + "]", e);
throw new QueryParsingException(getShardContext().parseContext(), "Failed to parse [" + source + "]", e);
} finally {
if (parser != null) {
parser.close();
@ -197,11 +202,12 @@ public class IndexQueryParserService extends AbstractIndexComponent {
return parse(cache.get(), parser);
}
public ParsedQuery parse(QueryParseContext context, XContentParser parser) {
//norelease
public ParsedQuery parse(QueryShardContext context, XContentParser parser) {
try {
return innerParse(context, parser);
} catch (IOException e) {
throw new QueryParsingException(context, "Failed to parse", e);
throw new QueryParsingException(context.parseContext(), "Failed to parse", e);
}
}
@ -209,11 +215,12 @@ public class IndexQueryParserService extends AbstractIndexComponent {
* Parses an inner filter, returning null if the filter should be ignored.
*/
@Nullable
//norelease
public ParsedQuery parseInnerFilter(XContentParser parser) throws IOException {
QueryParseContext context = cache.get();
QueryShardContext context = cache.get();
context.reset(parser);
try {
Query filter = context.parseInnerFilter();
Query filter = context.parseContext().parseInnerFilter();
if (filter == null) {
return null;
}
@ -224,27 +231,23 @@ public class IndexQueryParserService extends AbstractIndexComponent {
}
@Nullable
public Query parseInnerQuery(XContentParser parser) throws IOException {
QueryParseContext context = cache.get();
context.reset(parser);
try {
return context.parseInnerQuery();
} finally {
context.reset(null);
}
public QueryBuilder parseInnerQueryBuilder(QueryParseContext parseContext) throws IOException {
parseContext.parseFieldMatcher(parseFieldMatcher);
QueryBuilder query = parseContext.parseInnerQueryBuilder();
return query;
}
@Nullable
public Query parseInnerQuery(QueryParseContext parseContext) throws IOException {
parseContext.parseFieldMatcher(parseFieldMatcher);
Query query = parseContext.parseInnerQuery();
//norelease
public Query parseInnerQuery(QueryShardContext context) throws IOException {
Query query = context.parseContext().parseInnerQueryBuilder().toQuery(context);
if (query == null) {
query = Queries.newMatchNoDocsQuery();
}
return query;
}
public QueryParseContext getParseContext() {
public QueryShardContext getShardContext() {
return cache.get();
}
@ -276,36 +279,41 @@ public class IndexQueryParserService extends AbstractIndexComponent {
XContentParser qSourceParser = XContentFactory.xContent(querySource).createParser(querySource);
parsedQuery = parse(qSourceParser);
} else {
throw new QueryParsingException(getParseContext(), "request does not support [" + fieldName + "]");
throw new QueryParsingException(getShardContext().parseContext(), "request does not support [" + fieldName + "]");
}
}
}
if (parsedQuery != null) {
return parsedQuery;
}
} catch (QueryParsingException e) {
} catch (QueryShardException e) {
throw e;
} catch (Throwable e) {
throw new QueryParsingException(getParseContext(), "Failed to parse", e);
throw new QueryParsingException(getShardContext().parseContext(), "Failed to parse", e);
}
throw new QueryParsingException(getParseContext(), "Required query is missing");
throw new QueryParsingException(getShardContext().parseContext(), "Required query is missing");
}
private ParsedQuery innerParse(QueryParseContext parseContext, XContentParser parser) throws IOException, QueryParsingException {
parseContext.reset(parser);
//norelease
private ParsedQuery innerParse(QueryShardContext context, XContentParser parser) throws IOException, QueryShardException {
context.reset(parser);
try {
parseContext.parseFieldMatcher(parseFieldMatcher);
Query query = parseContext.parseInnerQuery();
if (query == null) {
query = Queries.newMatchNoDocsQuery();
}
return new ParsedQuery(query, parseContext.copyNamedQueries());
context.parseFieldMatcher(parseFieldMatcher);
return innerParse(context, context.parseContext().parseInnerQueryBuilder());
} finally {
parseContext.reset(null);
context.reset(null);
}
}
private static ParsedQuery innerParse(QueryShardContext context, QueryBuilder queryBuilder) throws IOException, QueryShardException {
Query query = queryBuilder.toQuery(context);
if (query == null) {
query = Queries.newMatchNoDocsQuery();
}
return new ParsedQuery(query, context.copyNamedQueries());
}
public ParseFieldMatcher parseFieldMatcher() {
return parseFieldMatcher;
}

View File

@ -58,7 +58,8 @@ public class IndicesQueryParser extends BaseQueryParserTemp {
}
@Override
public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
public Query parse(QueryShardContext context) throws IOException, QueryParsingException {
QueryParseContext parseContext = context.parseContext();
XContentParser parser = parseContext.parser();
Query noMatchQuery = null;
@ -149,7 +150,7 @@ public class IndicesQueryParser extends BaseQueryParserTemp {
}
}
if (queryName != null) {
parseContext.addNamedQuery(queryName, chosenQuery);
context.addNamedQuery(queryName, chosenQuery);
}
chosenQuery.setBoost(boost);
return chosenQuery;

View File

@ -55,7 +55,7 @@ public class LimitQueryBuilder extends AbstractQueryBuilder<LimitQueryBuilder> {
}
@Override
protected Query doToQuery(QueryParseContext parseContext) throws IOException {
protected Query doToQuery(QueryShardContext context) throws IOException {
// this filter is deprecated and parses to a filter that matches everything
return Queries.newMatchAllQuery();
}

View File

@ -44,7 +44,7 @@ public class MatchAllQueryBuilder extends AbstractQueryBuilder<MatchAllQueryBuil
}
@Override
protected Query doToQuery(QueryParseContext parseContext) throws IOException {
protected Query doToQuery(QueryShardContext context) throws IOException {
return Queries.newMatchAllQuery();
}

View File

@ -48,7 +48,8 @@ public class MatchQueryParser extends BaseQueryParserTemp {
}
@Override
public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
public Query parse(QueryShardContext context) throws IOException, QueryParsingException {
QueryParseContext parseContext = context.parseContext();
XContentParser parser = parseContext.parser();
MatchQuery.Type type = MatchQuery.Type.BOOLEAN;
@ -68,7 +69,7 @@ public class MatchQueryParser extends BaseQueryParserTemp {
Object value = null;
float boost = AbstractQueryBuilder.DEFAULT_BOOST;
MatchQuery matchQuery = new MatchQuery(parseContext);
MatchQuery matchQuery = new MatchQuery(context);
String minimumShouldMatch = null;
String queryName = null;
@ -94,7 +95,7 @@ public class MatchQueryParser extends BaseQueryParserTemp {
}
} else if ("analyzer".equals(currentFieldName)) {
String analyzer = parser.text();
if (parseContext.analysisService().analyzer(analyzer) == null) {
if (context.analysisService().analyzer(analyzer) == null) {
throw new QueryParsingException(parseContext, "[match] analyzer [" + parser.text() + "] not found");
}
matchQuery.setAnalyzer(analyzer);
@ -163,7 +164,7 @@ public class MatchQueryParser extends BaseQueryParserTemp {
}
query.setBoost(boost);
if (queryName != null) {
parseContext.addNamedQuery(queryName, query);
context.addNamedQuery(queryName, query);
}
return query;
}

View File

@ -110,28 +110,28 @@ public class MissingQueryBuilder extends AbstractQueryBuilder<MissingQueryBuilde
}
@Override
protected Query doToQuery(QueryParseContext parseContext) throws IOException {
return newFilter(parseContext, fieldPattern, existence, nullValue);
protected Query doToQuery(QueryShardContext context) throws IOException {
return newFilter(context, fieldPattern, existence, nullValue);
}
public static Query newFilter(QueryParseContext parseContext, String fieldPattern, boolean existence, boolean nullValue) {
public static Query newFilter(QueryShardContext context, String fieldPattern, boolean existence, boolean nullValue) {
if (!existence && !nullValue) {
throw new QueryParsingException(parseContext, "missing must have either existence, or null_value, or both set to true");
throw new QueryShardException(context, "missing must have either existence, or null_value, or both set to true");
}
final FieldNamesFieldMapper.FieldNamesFieldType fieldNamesFieldType = (FieldNamesFieldMapper.FieldNamesFieldType) parseContext.mapperService().fullName(FieldNamesFieldMapper.NAME);
final FieldNamesFieldMapper.FieldNamesFieldType fieldNamesFieldType = (FieldNamesFieldMapper.FieldNamesFieldType) context.mapperService().fullName(FieldNamesFieldMapper.NAME);
if (fieldNamesFieldType == null) {
// can only happen when no types exist, so no docs exist either
return Queries.newMatchNoDocsQuery();
}
ObjectMapper objectMapper = parseContext.getObjectMapper(fieldPattern);
ObjectMapper objectMapper = context.getObjectMapper(fieldPattern);
if (objectMapper != null) {
// automatically treat the object mapper as a field pattern
fieldPattern = fieldPattern + ".*";
}
Collection<String> fields = parseContext.simpleMatchToIndexNames(fieldPattern);
Collection<String> fields = context.simpleMatchToIndexNames(fieldPattern);
if (fields.isEmpty()) {
if (existence) {
// if we ask for existence of fields, and we found none, then we should match on all
@ -146,7 +146,7 @@ public class MissingQueryBuilder extends AbstractQueryBuilder<MissingQueryBuilde
if (existence) {
BooleanQuery boolFilter = new BooleanQuery();
for (String field : fields) {
MappedFieldType fieldType = parseContext.fieldMapper(field);
MappedFieldType fieldType = context.fieldMapper(field);
Query filter = null;
if (fieldNamesFieldType.isEnabled()) {
final String f;
@ -155,7 +155,7 @@ public class MissingQueryBuilder extends AbstractQueryBuilder<MissingQueryBuilde
} else {
f = field;
}
filter = fieldNamesFieldType.termQuery(f, parseContext);
filter = fieldNamesFieldType.termQuery(f, context);
}
// if _field_names are not indexed, we need to go the slow way
if (filter == null && fieldType != null) {
@ -173,7 +173,7 @@ public class MissingQueryBuilder extends AbstractQueryBuilder<MissingQueryBuilde
if (nullValue) {
for (String field : fields) {
MappedFieldType fieldType = parseContext.fieldMapper(field);
MappedFieldType fieldType = context.fieldMapper(field);
if (fieldType != null) {
nullFilter = fieldType.nullValueQuery();
}

View File

@ -21,6 +21,7 @@ package org.elasticsearch.index.query;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.queries.TermsQuery;
import org.apache.lucene.search.BooleanClause;
@ -91,11 +92,12 @@ public class MoreLikeThisQueryParser extends BaseQueryParserTemp {
}
@Override
public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
public Query parse(QueryShardContext context) throws IOException, QueryParsingException {
QueryParseContext parseContext = context.parseContext();
XContentParser parser = parseContext.parser();
MoreLikeThisQuery mltQuery = new MoreLikeThisQuery();
mltQuery.setSimilarity(parseContext.searchSimilarity());
mltQuery.setSimilarity(context.searchSimilarity());
Analyzer analyzer = null;
List<String> moreLikeFields = null;
boolean failOnUnsupportedField = true;
@ -142,7 +144,7 @@ public class MoreLikeThisQueryParser extends BaseQueryParserTemp {
} else if (parseContext.parseFieldMatcher().match(currentFieldName, Fields.MINIMUM_SHOULD_MATCH)) {
mltQuery.setMinimumShouldMatch(parser.text());
} else if ("analyzer".equals(currentFieldName)) {
analyzer = parseContext.analysisService().analyzer(parser.text());
analyzer = context.analysisService().analyzer(parser.text());
} else if ("boost".equals(currentFieldName)) {
mltQuery.setBoost(parser.floatValue());
} else if (parseContext.parseFieldMatcher().match(currentFieldName, Fields.FAIL_ON_UNSUPPORTED_FIELD)) {
@ -165,7 +167,7 @@ public class MoreLikeThisQueryParser extends BaseQueryParserTemp {
moreLikeFields = Lists.newLinkedList();
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
String field = parser.text();
MappedFieldType fieldType = parseContext.fieldMapper(field);
MappedFieldType fieldType = context.fieldMapper(field);
moreLikeFields.add(fieldType == null ? field : fieldType.names().indexName());
}
} else if (parseContext.parseFieldMatcher().match(currentFieldName, Fields.DOCUMENT_IDS)) {
@ -214,14 +216,14 @@ public class MoreLikeThisQueryParser extends BaseQueryParserTemp {
// set analyzer
if (analyzer == null) {
analyzer = parseContext.mapperService().searchAnalyzer();
analyzer = context.mapperService().searchAnalyzer();
}
mltQuery.setAnalyzer(analyzer);
// set like text fields
boolean useDefaultField = (moreLikeFields == null);
if (useDefaultField) {
moreLikeFields = Lists.newArrayList(parseContext.defaultField());
moreLikeFields = Lists.newArrayList(context.defaultField());
}
// possibly remove unsupported fields
removeUnsupportedFields(moreLikeFields, analyzer, failOnUnsupportedField);
@ -232,7 +234,7 @@ public class MoreLikeThisQueryParser extends BaseQueryParserTemp {
// support for named query
if (queryName != null) {
parseContext.addNamedQuery(queryName, mltQuery);
context.addNamedQuery(queryName, mltQuery);
}
// handle like texts
@ -256,12 +258,12 @@ public class MoreLikeThisQueryParser extends BaseQueryParserTemp {
item.index(parseContext.index().name());
}
if (item.type() == null) {
if (parseContext.queryTypes().size() > 1) {
if (context.queryTypes().size() > 1) {
throw new QueryParsingException(parseContext,
"ambiguous type for item with id: " + item.id()
+ " and index: " + item.index());
} else {
item.type(parseContext.queryTypes().iterator().next());
item.type(context.queryTypes().iterator().next());
}
}
// default fields if not present but don't override for artificial docs

View File

@ -50,14 +50,15 @@ public class MultiMatchQueryParser extends BaseQueryParserTemp {
}
@Override
public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
public Query parse(QueryShardContext context) throws IOException, QueryParsingException {
QueryParseContext parseContext = context.parseContext();
XContentParser parser = parseContext.parser();
Object value = null;
float boost = AbstractQueryBuilder.DEFAULT_BOOST;
Float tieBreaker = null;
MultiMatchQueryBuilder.Type type = null;
MultiMatchQuery multiMatchQuery = new MultiMatchQuery(parseContext);
MultiMatchQuery multiMatchQuery = new MultiMatchQuery(context);
String minimumShouldMatch = null;
Map<String, Float> fieldNameWithBoosts = Maps.newHashMap();
String queryName = null;
@ -70,10 +71,10 @@ public class MultiMatchQueryParser extends BaseQueryParserTemp {
} else if ("fields".equals(currentFieldName)) {
if (token == XContentParser.Token.START_ARRAY) {
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
extractFieldAndBoost(parseContext, parser, fieldNameWithBoosts);
extractFieldAndBoost(context, parser, fieldNameWithBoosts);
}
} else if (token.isValue()) {
extractFieldAndBoost(parseContext, parser, fieldNameWithBoosts);
extractFieldAndBoost(context, parser, fieldNameWithBoosts);
} else {
throw new QueryParsingException(parseContext, "[" + MultiMatchQueryBuilder.NAME + "] query does not support [" + currentFieldName + "]");
}
@ -84,7 +85,7 @@ public class MultiMatchQueryParser extends BaseQueryParserTemp {
type = MultiMatchQueryBuilder.Type.parse(parser.text(), parseContext.parseFieldMatcher());
} else if ("analyzer".equals(currentFieldName)) {
String analyzer = parser.text();
if (parseContext.analysisService().analyzer(analyzer) == null) {
if (context.analysisService().analyzer(analyzer) == null) {
throw new QueryParsingException(parseContext, "[" + MultiMatchQueryBuilder.NAME + "] analyzer [" + parser.text() + "] not found");
}
multiMatchQuery.setAnalyzer(analyzer);
@ -156,12 +157,12 @@ public class MultiMatchQueryParser extends BaseQueryParserTemp {
query.setBoost(boost);
if (queryName != null) {
parseContext.addNamedQuery(queryName, query);
context.addNamedQuery(queryName, query);
}
return query;
}
private void extractFieldAndBoost(QueryParseContext parseContext, XContentParser parser, Map<String, Float> fieldNameWithBoosts) throws IOException {
private void extractFieldAndBoost(QueryShardContext context, XContentParser parser, Map<String, Float> fieldNameWithBoosts) throws IOException {
String fField = null;
Float fBoost = null;
char[] fieldText = parser.textCharacters();
@ -179,7 +180,7 @@ public class MultiMatchQueryParser extends BaseQueryParserTemp {
}
if (Regex.isSimpleMatchPattern(fField)) {
for (String field : parseContext.mapperService().simpleMatchToIndexNames(fField)) {
for (String field : context.mapperService().simpleMatchToIndexNames(fField)) {
fieldNameWithBoosts.put(field, fBoost);
}
} else {

View File

@ -54,9 +54,10 @@ public class NestedQueryParser extends BaseQueryParserTemp {
}
@Override
public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
public Query parse(QueryShardContext context) throws IOException, QueryParsingException {
QueryParseContext parseContext = context.parseContext();
XContentParser parser = parseContext.parser();
final ToBlockJoinQueryBuilder builder = new ToBlockJoinQueryBuilder(parseContext);
final ToBlockJoinQueryBuilder builder = new ToBlockJoinQueryBuilder(context);
float boost = AbstractQueryBuilder.DEFAULT_BOOST;
ScoreMode scoreMode = ScoreMode.Avg;
@ -110,7 +111,7 @@ public class NestedQueryParser extends BaseQueryParserTemp {
if (joinQuery != null) {
joinQuery.setBoost(boost);
if (queryName != null) {
parseContext.addNamedQuery(queryName, joinQuery);
context.addNamedQuery(queryName, joinQuery);
}
}
return joinQuery;
@ -121,8 +122,8 @@ public class NestedQueryParser extends BaseQueryParserTemp {
private ScoreMode scoreMode;
private Tuple<String, SubSearchContext> innerHits;
public ToBlockJoinQueryBuilder(QueryParseContext parseContext) throws IOException {
super(parseContext);
public ToBlockJoinQueryBuilder(QueryShardContext context) throws IOException {
super(context);
}
public void setScoreMode(ScoreMode scoreMode) {
@ -146,14 +147,14 @@ public class NestedQueryParser extends BaseQueryParserTemp {
innerQuery = null;
}
} else {
throw new QueryParsingException(parseContext, "[nested] requires either 'query' or 'filter' field");
throw new QueryShardException(shardContext, "[nested] requires either 'query' or 'filter' field");
}
if (innerHits != null) {
ParsedQuery parsedQuery = new ParsedQuery(innerQuery, parseContext.copyNamedQueries());
ParsedQuery parsedQuery = new ParsedQuery(innerQuery, shardContext.copyNamedQueries());
InnerHitsContext.NestedInnerHits nestedInnerHits = new InnerHitsContext.NestedInnerHits(innerHits.v2(), parsedQuery, null, getParentObjectMapper(), nestedObjectMapper);
String name = innerHits.v1() != null ? innerHits.v1() : path;
parseContext.addInnerHits(name, nestedInnerHits);
shardContext.addInnerHits(name, nestedInnerHits);
}
if (innerQuery != null) {

View File

@ -60,8 +60,8 @@ public class NotQueryBuilder extends AbstractQueryBuilder<NotQueryBuilder> {
}
@Override
protected Query doToQuery(QueryParseContext parseContext) throws IOException {
Query luceneQuery = filter.toQuery(parseContext);
protected Query doToQuery(QueryShardContext context) throws IOException {
Query luceneQuery = filter.toQuery(context);
if (luceneQuery == null) {
return null;
}

View File

@ -81,7 +81,7 @@ public class OrQueryBuilder extends AbstractQueryBuilder<OrQueryBuilder> {
}
@Override
protected Query doToQuery(QueryParseContext parseContext) throws IOException {
protected Query doToQuery(QueryShardContext context) throws IOException {
if (filters.isEmpty()) {
// no filters provided, this should be ignored upstream
return null;
@ -89,7 +89,7 @@ public class OrQueryBuilder extends AbstractQueryBuilder<OrQueryBuilder> {
BooleanQuery query = new BooleanQuery();
for (QueryBuilder f : filters) {
Query innerQuery = f.toQuery(parseContext);
Query innerQuery = f.toQuery(context);
// ignore queries that are null
if (innerQuery != null) {
query.add(innerQuery, Occur.SHOULD);

View File

@ -96,13 +96,13 @@ public class PrefixQueryBuilder extends AbstractQueryBuilder<PrefixQueryBuilder>
}
@Override
protected Query doToQuery(QueryParseContext parseContext) throws IOException {
MultiTermQuery.RewriteMethod method = QueryParsers.parseRewriteMethod(parseContext.parseFieldMatcher(), rewrite, null);
protected Query doToQuery(QueryShardContext context) throws IOException {
MultiTermQuery.RewriteMethod method = QueryParsers.parseRewriteMethod(context.parseFieldMatcher(), rewrite, null);
Query query = null;
MappedFieldType fieldType = parseContext.fieldMapper(fieldName);
MappedFieldType fieldType = context.fieldMapper(fieldName);
if (fieldType != null) {
query = fieldType.prefixQuery(value, method, parseContext);
query = fieldType.prefixQuery(value, method, context);
}
if (query == null) {
PrefixQuery prefixQuery = new PrefixQuery(new Term(fieldName, BytesRefs.toBytesRef(value)));

View File

@ -42,12 +42,12 @@ public interface QueryBuilder<QB extends QueryBuilder> extends NamedWriteable<QB
* Returns <tt>null</tt> if this query should be ignored in the context of
* parent queries.
*
* @param parseContext additional information needed to construct the queries
* @param context additional information needed to construct the queries
* @return the {@link Query} or <tt>null</tt> if this query should be ignored upstream
* @throws QueryParsingException
* @throws QueryShardException
* @throws IOException
*/
Query toQuery(QueryParseContext parseContext) throws IOException;
Query toQuery(QueryShardContext context) throws IOException;
/**
* Returns a {@link org.elasticsearch.common.bytes.BytesReference}

View File

@ -65,9 +65,9 @@ public class QueryFilterBuilder extends AbstractQueryBuilder<QueryFilterBuilder>
}
@Override
protected Query doToQuery(QueryParseContext parseContext) throws IOException {
protected Query doToQuery(QueryShardContext context) throws IOException {
// inner query builder can potentially be `null`, in that case we ignore it
Query innerQuery = this.queryBuilder.toQuery(parseContext);
Query innerQuery = this.queryBuilder.toQuery(context);
if (innerQuery == null) {
return null;
}

View File

@ -19,201 +19,111 @@
package org.elasticsearch.index.query;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.queryparser.classic.MapperQueryParser;
import org.apache.lucene.queryparser.classic.QueryParserSettings;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.join.BitDocIdSetFilter;
import org.apache.lucene.search.similarities.Similarity;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.mapper.*;
import org.elasticsearch.index.mapper.core.StringFieldMapper;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.index.query.support.NestedScope;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.fetch.innerhits.InnerHitsContext;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import java.io.IOException;
import java.util.*;
public class QueryParseContext {
private static final ParseField CACHE = new ParseField("_cache").withAllDeprecated("Elasticsearch makes its own caching decisions");
private static final ParseField CACHE_KEY = new ParseField("_cache_key").withAllDeprecated("Filters are always used as cache keys");
private static ThreadLocal<String[]> typesContext = new ThreadLocal<>();
public static void setTypes(String[] types) {
typesContext.set(types);
}
public static String[] getTypes() {
return typesContext.get();
}
public static String[] setTypesWithPrevious(String[] types) {
String[] old = typesContext.get();
setTypes(types);
return old;
}
public static void removeTypes() {
typesContext.remove();
}
private final Index index;
private final Version indexVersionCreated;
private final IndexQueryParserService indexQueryParser;
private final Map<String, Query> namedQueries = Maps.newHashMap();
private final MapperQueryParser queryParser = new MapperQueryParser(this);
private XContentParser parser;
private final Index index;
//norelease this flag is also used in the QueryShardContext, we need to make sure we set it there correctly in doToQuery()
private boolean isFilter;
private ParseFieldMatcher parseFieldMatcher;
private boolean allowUnmappedFields;
//norelease this can eventually be deleted when context() method goes away
private final QueryShardContext shardContext;
private IndicesQueriesRegistry indicesQueriesRegistry;
private boolean mapUnmappedFieldAsString;
private NestedScope nestedScope;
private boolean isFilter;
public QueryParseContext(Index index, IndexQueryParserService indexQueryParser) {
public QueryParseContext(Index index, IndicesQueriesRegistry registry) {
this.index = index;
this.indexVersionCreated = Version.indexCreated(indexQueryParser.indexSettings());
this.indexQueryParser = indexQueryParser;
this.indicesQueriesRegistry = registry;
this.shardContext = null;
}
QueryParseContext(QueryShardContext context) {
this.shardContext = context;
this.index = context.index();
this.indicesQueriesRegistry = context.indexQueryParserService().indicesQueriesRegistry();
}
public void reset(XContentParser jp) {
this.parseFieldMatcher = ParseFieldMatcher.EMPTY;
this.parser = jp;
}
//norelease this is still used in BaseQueryParserTemp and FunctionScoreQueryParse, remove if not needed there anymore
@Deprecated
public QueryShardContext shardContext() {
return this.shardContext;
}
public XContentParser parser() {
return this.parser;
}
public void parseFieldMatcher(ParseFieldMatcher parseFieldMatcher) {
this.parseFieldMatcher = parseFieldMatcher;
}
public ParseFieldMatcher parseFieldMatcher() {
return parseFieldMatcher;
}
public void reset(XContentParser jp) {
allowUnmappedFields = indexQueryParser.defaultAllowUnmappedFields();
this.parseFieldMatcher = ParseFieldMatcher.EMPTY;
this.lookup = null;
this.parser = jp;
this.namedQueries.clear();
this.nestedScope = new NestedScope();
this.isFilter = false;
public boolean isDeprecatedSetting(String setting) {
return parseFieldMatcher.match(setting, CACHE) || parseFieldMatcher.match(setting, CACHE_KEY);
}
public Index index() {
return this.index;
}
public void parser(XContentParser parser) {
this.parser = parser;
}
public XContentParser parser() {
return parser;
}
public IndexQueryParserService indexQueryParserService() {
return indexQueryParser;
}
public AnalysisService analysisService() {
return indexQueryParser.analysisService;
}
public ScriptService scriptService() {
return indexQueryParser.scriptService;
}
public MapperService mapperService() {
return indexQueryParser.mapperService;
}
/**
* @deprecated replaced by calls to parseInnerFilterToQueryBuilder(String queryName) for the resulting queries
*/
@Nullable
public SimilarityService similarityService() {
return indexQueryParser.similarityService;
}
public Similarity searchSimilarity() {
return indexQueryParser.similarityService != null ? indexQueryParser.similarityService.similarity() : null;
}
public String defaultField() {
return indexQueryParser.defaultField();
}
public boolean queryStringLenient() {
return indexQueryParser.queryStringLenient();
}
public MapperQueryParser queryParser(QueryParserSettings settings) {
queryParser.reset(settings);
return queryParser;
}
public BitDocIdSetFilter bitsetFilter(Filter filter) {
return indexQueryParser.bitsetFilterCache.getBitDocIdSetFilter(filter);
}
public <IFD extends IndexFieldData<?>> IFD getForField(MappedFieldType mapper) {
return indexQueryParser.fieldDataService.getForField(mapper);
}
public void addNamedQuery(String name, Query query) {
namedQueries.put(name, query);
}
public ImmutableMap<String, Query> copyNamedQueries() {
return ImmutableMap.copyOf(namedQueries);
}
public void combineNamedQueries(QueryParseContext context) {
namedQueries.putAll(context.namedQueries);
@Deprecated
//norelease should be possible to remove after refactoring all queries
public Query parseInnerFilter(String queryName) throws IOException, QueryShardException {
assert this.shardContext != null;
QueryBuilder builder = parseInnerFilterToQueryBuilder(queryName);
return (builder != null) ? builder.toQuery(this.shardContext) : null;
}
/**
* Return whether we are currently parsing a filter or a query.
* @deprecated replaced by calls to parseInnerFilterToQueryBuilder() for the resulting queries
*/
public boolean isFilter() {
return isFilter;
@Nullable
@Deprecated
//norelease should be possible to remove after refactoring all queries
public Query parseInnerFilter() throws QueryShardException, IOException {
assert this.shardContext != null;
QueryBuilder builder = parseInnerFilterToQueryBuilder();
Query result = null;
if (builder != null) {
result = builder.toQuery(this.shardContext);
}
return result;
}
public void addInnerHits(String name, InnerHitsContext.BaseInnerHits context) {
SearchContext sc = SearchContext.current();
if (sc == null) {
throw new QueryParsingException(this, "inner_hits unsupported");
/**
* @deprecated replaced by calls to parseInnerQueryBuilder() for the resulting queries
*/
@Nullable
@Deprecated
//norelease should be possible to remove after refactoring all queries
public Query parseInnerQuery() throws IOException, QueryShardException {
QueryBuilder builder = parseInnerQueryBuilder();
Query result = null;
if (builder != null) {
result = builder.toQuery(this.shardContext);
}
InnerHitsContext innerHitsContext;
if (sc.innerHits() == null) {
innerHitsContext = new InnerHitsContext(new HashMap<String, InnerHitsContext.BaseInnerHits>());
sc.innerHits(innerHitsContext);
} else {
innerHitsContext = sc.innerHits();
}
innerHitsContext.addInnerHitDefinition(name, context);
return result;
}
/**
@ -244,7 +154,7 @@ public class QueryParseContext {
throw new QueryParsingException(this, "[_na] query malformed, no field after start_object");
}
QueryParser queryParser = indexQueryParser.queryParser(queryName);
QueryParser queryParser = queryParser(queryName);
if (queryParser == null) {
throw new QueryParsingException(this, "No query registered for [" + queryName + "]");
}
@ -257,40 +167,12 @@ public class QueryParseContext {
}
/**
* @deprecated replaced by calls to parseInnerQueryBuilder() for the resulting queries
*/
@Nullable
@Deprecated
public Query parseInnerQuery() throws IOException, QueryParsingException {
QueryBuilder builder = parseInnerQueryBuilder();
Query result = null;
if (builder != null) {
result = builder.toQuery(this);
}
return result;
}
/**
* @deprecated replaced by calls to parseInnerFilterToQueryBuilder() for the resulting queries
*/
@Nullable
@Deprecated
public Query parseInnerFilter() throws QueryParsingException, IOException {
QueryBuilder builder = parseInnerFilterToQueryBuilder();
Query result = null;
if (builder != null) {
result = builder.toQuery(this);
}
return result;
}
/**
* @return
* @throws QueryParsingException
* @return a new QueryBuilder based on the current state of the parser, but does so that the inner query
* is parsed to a filter
* @throws IOException
*/
@Nullable
public QueryBuilder parseInnerFilterToQueryBuilder() throws QueryParsingException, IOException {
public QueryBuilder parseInnerFilterToQueryBuilder() throws IOException {
final boolean originalIsFilter = isFilter;
try {
isFilter = true;
@ -300,11 +182,11 @@ public class QueryParseContext {
}
}
public QueryBuilder parseInnerFilterToQueryBuilder(String queryName) throws IOException, QueryParsingException {
QueryBuilder parseInnerFilterToQueryBuilder(String queryName) throws IOException, QueryParsingException {
final boolean originalIsFilter = isFilter;
try {
isFilter = true;
QueryParser queryParser = indexQueryParser.queryParser(queryName);
QueryParser queryParser = queryParser(queryName);
if (queryParser == null) {
throw new QueryParsingException(this, "No query registered for [" + queryName + "]");
}
@ -314,123 +196,19 @@ public class QueryParseContext {
}
}
/**
* @deprecated replaced by calls to parseInnerFilterToQueryBuilder(String queryName) for the resulting queries
*/
@Nullable
@Deprecated
public Query parseInnerFilter(String queryName) throws IOException, QueryParsingException {
QueryBuilder builder = parseInnerFilterToQueryBuilder(queryName);
return (builder != null) ? builder.toQuery(this) : null;
public boolean isFilter() {
return this.isFilter;
}
public Collection<String> simpleMatchToIndexNames(String pattern) {
return indexQueryParser.mapperService.simpleMatchToIndexNames(pattern, getTypes());
public ParseFieldMatcher parseFieldMatcher() {
return parseFieldMatcher;
}
public MappedFieldType fieldMapper(String name) {
return failIfFieldMappingNotFound(name, indexQueryParser.mapperService.smartNameFieldType(name, getTypes()));
public void parser(XContentParser innerParser) {
this.parser = innerParser;
}
public ObjectMapper getObjectMapper(String name) {
return indexQueryParser.mapperService.getObjectMapper(name, getTypes());
QueryParser queryParser(String name) {
return indicesQueriesRegistry.queryParsers().get(name);
}
/** Gets the search analyzer for the given field, or the default if there is none present for the field
* TODO: remove this by moving defaults into mappers themselves
*/
public Analyzer getSearchAnalyzer(MappedFieldType fieldType) {
if (fieldType.searchAnalyzer() != null) {
return fieldType.searchAnalyzer();
}
return mapperService().searchAnalyzer();
}
/** Gets the search quote analyzer for the given field, or the default if there is none present for the field
* TODO: remove this by moving defaults into mappers themselves
*/
public Analyzer getSearchQuoteAnalyzer(MappedFieldType fieldType) {
if (fieldType.searchQuoteAnalyzer() != null) {
return fieldType.searchQuoteAnalyzer();
}
return mapperService().searchQuoteAnalyzer();
}
public void setAllowUnmappedFields(boolean allowUnmappedFields) {
this.allowUnmappedFields = allowUnmappedFields;
}
public void setMapUnmappedFieldAsString(boolean mapUnmappedFieldAsString) {
this.mapUnmappedFieldAsString = mapUnmappedFieldAsString;
}
private MappedFieldType failIfFieldMappingNotFound(String name, MappedFieldType fieldMapping) {
if (allowUnmappedFields) {
return fieldMapping;
} else if (mapUnmappedFieldAsString){
StringFieldMapper.Builder builder = MapperBuilders.stringField(name);
// it would be better to pass the real index settings, but they are not easily accessible from here...
Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, indexQueryParser.getIndexCreatedVersion()).build();
return builder.build(new Mapper.BuilderContext(settings, new ContentPath(1))).fieldType();
} else {
Version indexCreatedVersion = indexQueryParser.getIndexCreatedVersion();
if (fieldMapping == null && indexCreatedVersion.onOrAfter(Version.V_1_4_0_Beta1)) {
throw new QueryParsingException(this, "Strict field resolution and no field mapping can be found for the field with name ["
+ name + "]");
} else {
return fieldMapping;
}
}
}
/**
* Returns the narrowed down explicit types, or, if not set, all types.
*/
public Collection<String> queryTypes() {
String[] types = getTypes();
if (types == null || types.length == 0) {
return mapperService().types();
}
if (types.length == 1 && types[0].equals("_all")) {
return mapperService().types();
}
return Arrays.asList(types);
}
private SearchLookup lookup = null;
public SearchLookup lookup() {
SearchContext current = SearchContext.current();
if (current != null) {
return current.lookup();
}
if (lookup == null) {
lookup = new SearchLookup(mapperService(), indexQueryParser.fieldDataService, null);
}
return lookup;
}
public long nowInMillis() {
SearchContext current = SearchContext.current();
if (current != null) {
return current.nowInMillis();
}
return System.currentTimeMillis();
}
public NestedScope nestedScope() {
return nestedScope;
}
/**
* Return whether the setting is deprecated.
*/
public boolean isDeprecatedSetting(String setting) {
return parseFieldMatcher.match(setting, CACHE) || parseFieldMatcher.match(setting, CACHE_KEY);
}
public Version indexVersionCreated() {
return indexVersionCreated;
}
}

View File

@ -44,10 +44,10 @@ public interface QueryParser {
*/
//norelease can be removed in favour of fromXContent once search requests can be parsed on the coordinating node
@Nullable
Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException;
Query parse(QueryShardContext context) throws IOException, QueryParsingException;
/**
* Creates a new {@link QueryBuilder} from the query held by the {@link QueryParseContext}
* Creates a new {@link QueryBuilder} from the query held by the {@link QueryShardContext}
* in {@link org.elasticsearch.common.xcontent.XContent} format
*
* @param parseContext

View File

@ -31,7 +31,8 @@ import org.elasticsearch.rest.RestStatus;
import java.io.IOException;
/**
*
* Exception that can be used when parsing queries with a given {@link QueryParseContext}.
* Can contain information about location of the error.
*/
public class QueryParsingException extends ElasticsearchException {
@ -71,6 +72,12 @@ public class QueryParsingException extends ElasticsearchException {
this.columnNumber = col;
}
public QueryParsingException(StreamInput in) throws IOException{
super(in);
lineNumber = in.readInt();
columnNumber = in.readInt();
}
/**
* Line number of the location of the error
*
@ -109,11 +116,4 @@ public class QueryParsingException extends ElasticsearchException {
out.writeInt(lineNumber);
out.writeInt(columnNumber);
}
public QueryParsingException(StreamInput in) throws IOException{
super(in);
lineNumber = in.readInt();
columnNumber = in.readInt();
}
}

View File

@ -0,0 +1,327 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.query;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.queryparser.classic.MapperQueryParser;
import org.apache.lucene.queryparser.classic.QueryParserSettings;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.join.BitDocIdSetFilter;
import org.apache.lucene.search.similarities.Similarity;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.mapper.ContentPath;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperBuilders;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.core.StringFieldMapper;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.index.query.support.NestedScope;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.fetch.innerhits.InnerHitsContext;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.lookup.SearchLookup;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
/**
 * Context object used to create lucene queries on the shard level.
 *
 * Wraps the per-index {@link IndexQueryParserService} and exposes the services
 * (mappers, analysis, scripts, field data, similarity) a query parser needs when
 * turning a parsed query into a lucene {@link Query}. Also tracks per-request
 * state: named queries, the nested scope, and parse-leniency flags.
 *
 * NOTE(review): not thread-safe — intended to be used by a single search request
 * at a time (mutable fields are reset via {@link #reset(XContentParser)}); confirm
 * against callers.
 */
public class QueryShardContext {
    // Thread-local carrying the mapping types the current request is restricted to;
    // read by the mapper lookups below via getTypes().
    private static ThreadLocal<String[]> typesContext = new ThreadLocal<>();

    /** Sets the mapping types the current thread's request is restricted to. */
    public static void setTypes(String[] types) {
        typesContext.set(types);
    }

    /** Returns the mapping types set for the current thread, or {@code null} if none. */
    public static String[] getTypes() {
        return typesContext.get();
    }

    /**
     * Sets the given types and returns the previously set ones, so a caller can
     * temporarily narrow the types and restore the old value afterwards.
     */
    public static String[] setTypesWithPrevious(String[] types) {
        String[] old = typesContext.get();
        setTypes(types);
        return old;
    }

    /** Clears the thread-local types to avoid leaking them across requests. */
    public static void removeTypes() {
        typesContext.remove();
    }

    private final Index index;
    // Version the index was created with; drives version-dependent behavior
    // such as strict field resolution in failIfFieldMappingNotFound().
    private final Version indexVersionCreated;
    private final IndexQueryParserService indexQueryParser;
    // Queries registered under a name during parsing (the "_name" feature).
    private final Map<String, Query> namedQueries = Maps.newHashMap();
    // Reusable lucene query-string parser bound to this context; re-configured
    // per use via queryParser(QueryParserSettings).
    private final MapperQueryParser queryParser = new MapperQueryParser(this);
    private ParseFieldMatcher parseFieldMatcher;
    private boolean allowUnmappedFields;
    private boolean mapUnmappedFieldAsString;
    private NestedScope nestedScope;
    //norelease this should be possible to remove once query contexts are completely separated
    private QueryParseContext parseContext;
    private boolean isFilter;

    public QueryShardContext(Index index, IndexQueryParserService indexQueryParser) {
        this.index = index;
        this.indexVersionCreated = Version.indexCreated(indexQueryParser.indexSettings());
        this.indexQueryParser = indexQueryParser;
        // Transitional: the parse context is still owned by the shard context
        // while the two are being separated (see norelease note above).
        this.parseContext = new QueryParseContext(this);
    }

    public void parseFieldMatcher(ParseFieldMatcher parseFieldMatcher) {
        this.parseFieldMatcher = parseFieldMatcher;
    }

    public ParseFieldMatcher parseFieldMatcher() {
        return parseFieldMatcher;
    }

    // Resets per-request mutable state so the context can be reused.
    private void reset() {
        allowUnmappedFields = indexQueryParser.defaultAllowUnmappedFields();
        this.parseFieldMatcher = ParseFieldMatcher.EMPTY;
        this.lookup = null;
        this.namedQueries.clear();
        this.nestedScope = new NestedScope();
    }

    //norelease remove parser argument once query contexts are separated
    /** Resets this context and re-targets the embedded parse context at the given parser. */
    public void reset(XContentParser jp) {
        this.reset();
        this.parseContext.reset(jp);
    }

    public Index index() {
        return this.index;
    }

    public IndexQueryParserService indexQueryParserService() {
        return indexQueryParser;
    }

    public AnalysisService analysisService() {
        return indexQueryParser.analysisService;
    }

    public ScriptService scriptService() {
        return indexQueryParser.scriptService;
    }

    public MapperService mapperService() {
        return indexQueryParser.mapperService;
    }

    @Nullable
    public SimilarityService similarityService() {
        return indexQueryParser.similarityService;
    }

    /** Returns the configured search similarity, or {@code null} if no similarity service is set. */
    public Similarity searchSimilarity() {
        return indexQueryParser.similarityService != null ? indexQueryParser.similarityService.similarity() : null;
    }

    public String defaultField() {
        return indexQueryParser.defaultField();
    }

    public boolean queryStringLenient() {
        return indexQueryParser.queryStringLenient();
    }

    /** Returns the shared {@link MapperQueryParser}, re-initialized with the given settings. */
    public MapperQueryParser queryParser(QueryParserSettings settings) {
        queryParser.reset(settings);
        return queryParser;
    }

    public BitDocIdSetFilter bitsetFilter(Filter filter) {
        return indexQueryParser.bitsetFilterCache.getBitDocIdSetFilter(filter);
    }

    public <IFD extends IndexFieldData<?>> IFD getForField(MappedFieldType mapper) {
        return indexQueryParser.fieldDataService.getForField(mapper);
    }

    /** Registers a query under a name (the "_name" feature) for later retrieval in search hits. */
    public void addNamedQuery(String name, Query query) {
        namedQueries.put(name, query);
    }

    public ImmutableMap<String, Query> copyNamedQueries() {
        return ImmutableMap.copyOf(namedQueries);
    }

    /** Merges the named queries collected by another context into this one. */
    public void combineNamedQueries(QueryShardContext context) {
        namedQueries.putAll(context.namedQueries);
    }

    /**
     * Return whether we are currently parsing a filter or a query.
     */
    public boolean isFilter() {
        return isFilter;
    }

    /**
     * Registers an inner-hits definition on the current {@link SearchContext}.
     *
     * @throws QueryShardException if there is no current search context
     *         (inner hits are only supported during an actual search)
     */
    public void addInnerHits(String name, InnerHitsContext.BaseInnerHits context) {
        SearchContext sc = SearchContext.current();
        if (sc == null) {
            throw new QueryShardException(this, "inner_hits unsupported");
        }
        InnerHitsContext innerHitsContext;
        if (sc.innerHits() == null) {
            innerHitsContext = new InnerHitsContext(new HashMap<String, InnerHitsContext.BaseInnerHits>());
            sc.innerHits(innerHitsContext);
        } else {
            innerHitsContext = sc.innerHits();
        }
        innerHitsContext.addInnerHitDefinition(name, context);
    }

    /** Expands a field-name pattern (possibly containing wildcards) to concrete index field names. */
    public Collection<String> simpleMatchToIndexNames(String pattern) {
        return indexQueryParser.mapperService.simpleMatchToIndexNames(pattern, getTypes());
    }

    /**
     * Resolves the field type for the given name, honoring the unmapped-field
     * policy (see {@link #setAllowUnmappedFields} / {@link #setMapUnmappedFieldAsString}).
     */
    public MappedFieldType fieldMapper(String name) {
        return failIfFieldMappingNotFound(name, indexQueryParser.mapperService.smartNameFieldType(name, getTypes()));
    }

    public ObjectMapper getObjectMapper(String name) {
        return indexQueryParser.mapperService.getObjectMapper(name, getTypes());
    }

    /** Gets the search analyzer for the given field, or the default if there is none present for the field
     * TODO: remove this by moving defaults into mappers themselves
     */
    public Analyzer getSearchAnalyzer(MappedFieldType fieldType) {
        if (fieldType.searchAnalyzer() != null) {
            return fieldType.searchAnalyzer();
        }
        return mapperService().searchAnalyzer();
    }

    /** Gets the search quote analyzer for the given field, or the default if there is none present for the field
     * TODO: remove this by moving defaults into mappers themselves
     */
    public Analyzer getSearchQuoteAnalyzer(MappedFieldType fieldType) {
        if (fieldType.searchQuoteAnalyzer() != null) {
            return fieldType.searchQuoteAnalyzer();
        }
        return mapperService().searchQuoteAnalyzer();
    }

    public void setAllowUnmappedFields(boolean allowUnmappedFields) {
        this.allowUnmappedFields = allowUnmappedFields;
    }

    public void setMapUnmappedFieldAsString(boolean mapUnmappedFieldAsString) {
        this.mapUnmappedFieldAsString = mapUnmappedFieldAsString;
    }

    /**
     * Applies the unmapped-field policy: return the mapping as-is when unmapped
     * fields are allowed; synthesize a string field type when mapping-as-string is
     * enabled; otherwise fail for indices created on or after 1.4.0.Beta1.
     */
    private MappedFieldType failIfFieldMappingNotFound(String name, MappedFieldType fieldMapping) {
        if (allowUnmappedFields) {
            return fieldMapping;
        } else if (mapUnmappedFieldAsString){
            StringFieldMapper.Builder builder = MapperBuilders.stringField(name);
            // it would be better to pass the real index settings, but they are not easily accessible from here...
            Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, indexQueryParser.getIndexCreatedVersion()).build();
            return builder.build(new Mapper.BuilderContext(settings, new ContentPath(1))).fieldType();
        } else {
            Version indexCreatedVersion = indexQueryParser.getIndexCreatedVersion();
            if (fieldMapping == null && indexCreatedVersion.onOrAfter(Version.V_1_4_0_Beta1)) {
                throw new QueryShardException(this, "Strict field resolution and no field mapping can be found for the field with name ["
                        + name + "]");
            } else {
                return fieldMapping;
            }
        }
    }

    /**
     * Returns the narrowed down explicit types, or, if not set, all types.
     */
    public Collection<String> queryTypes() {
        String[] types = getTypes();
        if (types == null || types.length == 0) {
            return mapperService().types();
        }
        if (types.length == 1 && types[0].equals("_all")) {
            return mapperService().types();
        }
        return Arrays.asList(types);
    }

    // Lazily-built lookup used when no search context is active (cleared on reset()).
    private SearchLookup lookup = null;

    /** Returns the current search context's lookup, or a lazily created local one outside a search. */
    public SearchLookup lookup() {
        SearchContext current = SearchContext.current();
        if (current != null) {
            return current.lookup();
        }
        if (lookup == null) {
            lookup = new SearchLookup(mapperService(), indexQueryParser.fieldDataService, null);
        }
        return lookup;
    }

    /**
     * Returns the search context's notion of "now" so all queries in a request agree
     * on the current time; falls back to wall-clock time outside a search.
     */
    public long nowInMillis() {
        SearchContext current = SearchContext.current();
        if (current != null) {
            return current.nowInMillis();
        }
        return System.currentTimeMillis();
    }

    public NestedScope nestedScope() {
        return nestedScope;
    }

    public Version indexVersionCreated() {
        return indexVersionCreated;
    }

    //norelease transitional accessor while QueryParseContext and QueryShardContext are being separated
    public QueryParseContext parseContext() {
        return this.parseContext;
    }
}

View File

@ -0,0 +1,72 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.query;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.Index;
import org.elasticsearch.rest.RestStatus;
import java.io.IOException;
/**
 * Exception that is thrown when creating lucene queries on the shard.
 * <p>
 * Carries the index the failure occurred on and maps to HTTP
 * {@code 400 BAD_REQUEST}, since shard-level query creation failures are
 * caused by an invalid request rather than a server error.
 */
public class QueryShardException extends ElasticsearchException {

    public QueryShardException(QueryShardContext context, String msg, Object... args) {
        this(context, msg, null, args);
    }

    public QueryShardException(QueryShardContext context, String msg, Throwable cause, Object... args) {
        super(msg, cause, args);
        // record which index the query was being built for
        setIndex(context.index());
    }

    /**
     * This constructor is provided for use in unit tests where a
     * {@link QueryShardContext} may not be available.
     * NOTE(review): the {@code line} and {@code col} parameters are currently
     * unused; kept for signature compatibility with existing callers — confirm.
     */
    public QueryShardException(Index index, int line, int col, String msg, Throwable cause) {
        super(msg, cause);
        setIndex(index);
    }

    /** Deserialization constructor; no state beyond the superclass is carried. */
    public QueryShardException(StreamInput in) throws IOException {
        super(in);
    }

    @Override
    public RestStatus status() {
        return RestStatus.BAD_REQUEST;
    }

    // NOTE: the previous no-op innerToXContent(...) and writeTo(...) overrides
    // that only delegated to super have been removed — inheriting the superclass
    // implementations is semantically identical.
}

View File

@ -66,13 +66,14 @@ public class QueryStringQueryParser extends BaseQueryParserTemp {
}
@Override
public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
public Query parse(QueryShardContext context) throws IOException, QueryParsingException {
QueryParseContext parseContext = context.parseContext();
XContentParser parser = parseContext.parser();
String queryName = null;
QueryParserSettings qpSettings = new QueryParserSettings();
qpSettings.defaultField(parseContext.defaultField());
qpSettings.lenient(parseContext.queryStringLenient());
qpSettings.defaultField(context.defaultField());
qpSettings.lenient(context.queryStringLenient());
qpSettings.analyzeWildcard(defaultAnalyzeWildcard);
qpSettings.allowLeadingWildcard(defaultAllowLeadingWildcard);
qpSettings.locale(Locale.ROOT);
@ -105,7 +106,7 @@ public class QueryStringQueryParser extends BaseQueryParserTemp {
}
if (Regex.isSimpleMatchPattern(fField)) {
for (String field : parseContext.mapperService().simpleMatchToIndexNames(fField)) {
for (String field : context.mapperService().simpleMatchToIndexNames(fField)) {
qpSettings.fields().add(field);
if (fBoost != -1) {
if (qpSettings.boosts() == null) {
@ -143,13 +144,13 @@ public class QueryStringQueryParser extends BaseQueryParserTemp {
throw new QueryParsingException(parseContext, "Query default operator [" + op + "] is not allowed");
}
} else if ("analyzer".equals(currentFieldName)) {
NamedAnalyzer analyzer = parseContext.analysisService().analyzer(parser.text());
NamedAnalyzer analyzer = context.analysisService().analyzer(parser.text());
if (analyzer == null) {
throw new QueryParsingException(parseContext, "[query_string] analyzer [" + parser.text() + "] not found");
}
qpSettings.forcedAnalyzer(analyzer);
} else if ("quote_analyzer".equals(currentFieldName) || "quoteAnalyzer".equals(currentFieldName)) {
NamedAnalyzer analyzer = parseContext.analysisService().analyzer(parser.text());
NamedAnalyzer analyzer = context.analysisService().analyzer(parser.text());
if (analyzer == null) {
throw new QueryParsingException(parseContext, "[query_string] quote_analyzer [" + parser.text()
+ "] not found");
@ -214,16 +215,16 @@ public class QueryStringQueryParser extends BaseQueryParserTemp {
if (qpSettings.queryString() == null) {
throw new QueryParsingException(parseContext, "query_string must be provided with a [query]");
}
qpSettings.defaultAnalyzer(parseContext.mapperService().searchAnalyzer());
qpSettings.defaultQuoteAnalyzer(parseContext.mapperService().searchQuoteAnalyzer());
qpSettings.defaultAnalyzer(context.mapperService().searchAnalyzer());
qpSettings.defaultQuoteAnalyzer(context.mapperService().searchQuoteAnalyzer());
if (qpSettings.escape()) {
qpSettings.queryString(org.apache.lucene.queryparser.classic.QueryParser.escape(qpSettings.queryString()));
}
qpSettings.queryTypes(parseContext.queryTypes());
qpSettings.queryTypes(context.queryTypes());
MapperQueryParser queryParser = parseContext.queryParser(qpSettings);
MapperQueryParser queryParser = context.queryParser(qpSettings);
try {
Query query = queryParser.parse(qpSettings.queryString());
@ -238,7 +239,7 @@ public class QueryStringQueryParser extends BaseQueryParserTemp {
Queries.applyMinimumShouldMatch((BooleanQuery) query, qpSettings.minimumShouldMatch());
}
if (queryName != null) {
parseContext.addNamedQuery(queryName, query);
context.addNamedQuery(queryName, query);
}
return query;
} catch (org.apache.lucene.queryparser.classic.ParseException e) {

View File

@ -25,7 +25,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException;
/**
* QueryBuilder implementation that holds a lucene query, which can be returned by {@link QueryBuilder#toQuery(QueryParseContext)}.
* QueryBuilder implementation that holds a lucene query, which can be returned by {@link QueryBuilder#toQuery(QueryShardContext)}.
* Doesn't support conversion to {@link org.elasticsearch.common.xcontent.XContent} via {@link #doXContent(XContentBuilder, Params)}.
*/
//norelease to be removed once all queries support separate fromXContent and toQuery methods. Make AbstractQueryBuilder#toQuery final as well then.
@ -47,7 +47,7 @@ public class QueryWrappingQueryBuilder extends AbstractQueryBuilder<QueryWrappin
}
@Override
protected Query doToQuery(QueryParseContext parseContext) throws IOException {
protected Query doToQuery(QueryShardContext context) throws IOException {
return query;
}

View File

@ -243,9 +243,9 @@ public class RangeQueryBuilder extends AbstractQueryBuilder<RangeQueryBuilder> i
}
@Override
protected Query doToQuery(QueryParseContext parseContext) throws IOException {
protected Query doToQuery(QueryShardContext context) throws IOException {
Query query = null;
MappedFieldType mapper = parseContext.fieldMapper(this.fieldName);
MappedFieldType mapper = context.fieldMapper(this.fieldName);
if (mapper != null) {
if (mapper instanceof DateFieldMapper.DateFieldType) {
DateMathParser forcedDateParser = null;
@ -259,7 +259,7 @@ public class RangeQueryBuilder extends AbstractQueryBuilder<RangeQueryBuilder> i
query = ((DateFieldMapper.DateFieldType) mapper).rangeQuery(from, to, includeLower, includeUpper, dateTimeZone, forcedDateParser);
} else {
if (timeZone != null) {
throw new QueryParsingException(parseContext, "[range] time_zone can not be applied to non date field ["
throw new QueryShardException(context, "[range] time_zone can not be applied to non date field ["
+ fieldName + "]");
}
//LUCENE 4 UPGRADE Mapper#rangeQuery should use bytesref as well?
@ -267,7 +267,7 @@ public class RangeQueryBuilder extends AbstractQueryBuilder<RangeQueryBuilder> i
}
} else {
if (timeZone != null) {
throw new QueryParsingException(parseContext, "[range] time_zone can not be applied to non unmapped field ["
throw new QueryShardException(context, "[range] time_zone can not be applied to non unmapped field ["
+ fieldName + "]");
}
}

View File

@ -149,13 +149,13 @@ public class RegexpQueryBuilder extends AbstractQueryBuilder<RegexpQueryBuilder>
}
@Override
public Query doToQuery(QueryParseContext parseContext) throws QueryParsingException, IOException {
MultiTermQuery.RewriteMethod method = QueryParsers.parseRewriteMethod(parseContext.parseFieldMatcher(), rewrite, null);
public Query doToQuery(QueryShardContext context) throws QueryShardException, IOException {
MultiTermQuery.RewriteMethod method = QueryParsers.parseRewriteMethod(context.parseFieldMatcher(), rewrite, null);
Query query = null;
MappedFieldType fieldType = parseContext.fieldMapper(fieldName);
MappedFieldType fieldType = context.fieldMapper(fieldName);
if (fieldType != null) {
query = fieldType.regexpQuery(value, flagsValue, maxDeterminizedStates, method, parseContext);
query = fieldType.regexpQuery(value, flagsValue, maxDeterminizedStates, method, context);
}
if (query == null) {
RegexpQuery regexpQuery = new RegexpQuery(new Term(fieldName, BytesRefs.toBytesRef(value)), flagsValue, maxDeterminizedStates);

View File

@ -25,7 +25,6 @@ import org.apache.lucene.search.Query;
import org.apache.lucene.search.RandomAccessWeight;
import org.apache.lucene.search.Weight;
import org.apache.lucene.util.Bits;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
@ -66,8 +65,8 @@ public class ScriptQueryBuilder extends AbstractQueryBuilder<ScriptQueryBuilder>
}
@Override
protected Query doToQuery(QueryParseContext parseContext) throws IOException {
return new ScriptQuery(script, parseContext.scriptService(), parseContext.lookup());
protected Query doToQuery(QueryShardContext context) throws IOException {
return new ScriptQuery(script, context.scriptService(), context.lookup());
}
@Override

View File

@ -26,7 +26,9 @@ import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.query.SimpleQueryParser.Settings;
import java.io.IOException;
@ -260,26 +262,44 @@ public class SimpleQueryStringBuilder extends AbstractQueryBuilder<SimpleQuerySt
}
@Override
protected Query doToQuery(QueryParseContext parseContext) throws IOException {
protected Query doToQuery(QueryShardContext context) throws IOException {
// Use the default field (_all) if no fields specified
if (fieldsAndWeights.isEmpty()) {
String field = parseContext.defaultField();
String field = context.defaultField();
fieldsAndWeights.put(field, 1.0F);
}
// field names in builder can have wildcards etc, need to resolve them here
Map<String, Float> resolvedFieldsAndWeights = new TreeMap<>();
for (String fField : fieldsAndWeights.keySet()) {
if (Regex.isSimpleMatchPattern(fField)) {
for (String fieldName : context.mapperService().simpleMatchToIndexNames(fField)) {
resolvedFieldsAndWeights.put(fieldName, fieldsAndWeights.get(fField));
}
} else {
MappedFieldType fieldType = context.fieldMapper(fField);
if (fieldType != null) {
resolvedFieldsAndWeights.put(fieldType.names().indexName(), fieldsAndWeights.get(fField));
} else {
resolvedFieldsAndWeights.put(fField, fieldsAndWeights.get(fField));
}
}
}
// Use standard analyzer by default if none specified
Analyzer luceneAnalyzer;
if (analyzer == null) {
luceneAnalyzer = parseContext.mapperService().searchAnalyzer();
luceneAnalyzer = context.mapperService().searchAnalyzer();
} else {
luceneAnalyzer = parseContext.analysisService().analyzer(analyzer);
luceneAnalyzer = context.analysisService().analyzer(analyzer);
if (luceneAnalyzer == null) {
throw new QueryParsingException(parseContext, "[" + SimpleQueryStringBuilder.NAME + "] analyzer [" + analyzer
throw new QueryShardException(context, "[" + SimpleQueryStringBuilder.NAME + "] analyzer [" + analyzer
+ "] not found");
}
}
SimpleQueryParser sqp = new SimpleQueryParser(luceneAnalyzer, fieldsAndWeights, flags, settings);
SimpleQueryParser sqp = new SimpleQueryParser(luceneAnalyzer, resolvedFieldsAndWeights, flags, settings);
sqp.setDefaultOperator(defaultOperator.toBooleanClauseOccur());
Query query = sqp.parse(queryText);

View File

@ -20,10 +20,7 @@
package org.elasticsearch.index.query;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.MappedFieldType;
import java.io.IOException;
import java.util.HashMap;
import java.util.Locale;
@ -108,19 +105,7 @@ public class SimpleQueryStringParser extends BaseQueryParser {
if (fField == null) {
fField = parser.text();
}
if (Regex.isSimpleMatchPattern(fField)) {
for (String fieldName : parseContext.mapperService().simpleMatchToIndexNames(fField)) {
fieldsAndWeights.put(fieldName, fBoost);
}
} else {
MappedFieldType fieldType = parseContext.fieldMapper(fField);
if (fieldType != null) {
fieldsAndWeights.put(fieldType.names().indexName(), fBoost);
} else {
fieldsAndWeights.put(fField, fBoost);
}
}
fieldsAndWeights.put(fField, fBoost);
}
} else {
throw new QueryParsingException(parseContext,

View File

@ -74,10 +74,10 @@ public class SpanContainingQueryBuilder extends AbstractQueryBuilder<SpanContain
}
@Override
protected Query doToQuery(QueryParseContext parseContext) throws IOException {
Query innerBig = big.toQuery(parseContext);
protected Query doToQuery(QueryShardContext context) throws IOException {
Query innerBig = big.toQuery(context);
assert innerBig instanceof SpanQuery;
Query innerLittle = little.toQuery(parseContext);
Query innerLittle = little.toQuery(context);
assert innerLittle instanceof SpanQuery;
return new SpanContainingQuery((SpanQuery) innerBig, (SpanQuery) innerLittle);
}

View File

@ -76,8 +76,8 @@ public class SpanFirstQueryBuilder extends AbstractQueryBuilder<SpanFirstQueryBu
}
@Override
protected Query doToQuery(QueryParseContext parseContext) throws IOException {
Query innerSpanQuery = matchBuilder.toQuery(parseContext);
protected Query doToQuery(QueryShardContext context) throws IOException {
Query innerSpanQuery = matchBuilder.toQuery(context);
assert innerSpanQuery instanceof SpanQuery;
return new SpanFirstQuery((SpanQuery) innerSpanQuery, end);
}

View File

@ -57,8 +57,8 @@ public class SpanMultiTermQueryBuilder extends AbstractQueryBuilder<SpanMultiTer
}
@Override
protected Query doToQuery(QueryParseContext parseContext) throws IOException {
Query subQuery = multiTermQueryBuilder.toQuery(parseContext);
protected Query doToQuery(QueryShardContext context) throws IOException {
Query subQuery = multiTermQueryBuilder.toQuery(context);
if (subQuery instanceof MultiTermQuery == false) {
throw new UnsupportedOperationException("unsupported inner query, should be " + MultiTermQuery.class.getName() +" but was "
+ subQuery.getClass().getName());

View File

@ -130,10 +130,10 @@ public class SpanNearQueryBuilder extends AbstractQueryBuilder<SpanNearQueryBuil
}
@Override
protected Query doToQuery(QueryParseContext parseContext) throws IOException {
protected Query doToQuery(QueryShardContext context) throws IOException {
SpanQuery[] spanQueries = new SpanQuery[clauses.size()];
for (int i = 0; i < clauses.size(); i++) {
Query query = clauses.get(i).toQuery(parseContext);
Query query = clauses.get(i).toQuery(context);
assert query instanceof SpanQuery;
spanQueries[i] = (SpanQuery) query;
}

View File

@ -130,11 +130,11 @@ public class SpanNotQueryBuilder extends AbstractQueryBuilder<SpanNotQueryBuilde
}
@Override
protected Query doToQuery(QueryParseContext parseContext) throws IOException {
protected Query doToQuery(QueryShardContext context) throws IOException {
Query includeQuery = this.include.toQuery(parseContext);
Query includeQuery = this.include.toQuery(context);
assert includeQuery instanceof SpanQuery;
Query excludeQuery = this.exclude.toQuery(parseContext);
Query excludeQuery = this.exclude.toQuery(context);
assert excludeQuery instanceof SpanQuery;
SpanNotQuery query = new SpanNotQuery((SpanQuery) includeQuery, (SpanQuery) excludeQuery, pre, post);

View File

@ -67,10 +67,10 @@ public class SpanOrQueryBuilder extends AbstractQueryBuilder<SpanOrQueryBuilder>
}
@Override
protected Query doToQuery(QueryParseContext parseContext) throws IOException {
protected Query doToQuery(QueryShardContext context) throws IOException {
SpanQuery[] spanQueries = new SpanQuery[clauses.size()];
for (int i = 0; i < clauses.size(); i++) {
Query query = clauses.get(i).toQuery(parseContext);
Query query = clauses.get(i).toQuery(context);
assert query instanceof SpanQuery;
spanQueries[i] = (SpanQuery) query;
}

View File

@ -68,7 +68,7 @@ public class SpanTermQueryBuilder extends BaseTermQueryBuilder<SpanTermQueryBuil
}
@Override
public SpanQuery doToQuery(QueryParseContext context) throws IOException {
public SpanQuery doToQuery(QueryShardContext context) throws IOException {
BytesRef valueBytes = null;
String fieldName = this.fieldName;
MappedFieldType mapper = context.fieldMapper(fieldName);

View File

@ -79,10 +79,10 @@ public class SpanWithinQueryBuilder extends AbstractQueryBuilder<SpanWithinQuery
}
@Override
protected Query doToQuery(QueryParseContext parseContext) throws IOException {
Query innerBig = big.toQuery(parseContext);
protected Query doToQuery(QueryShardContext context) throws IOException {
Query innerBig = big.toQuery(context);
assert innerBig instanceof SpanQuery;
Query innerLittle = little.toQuery(parseContext);
Query innerLittle = little.toQuery(context);
assert innerLittle instanceof SpanQuery;
return new SpanWithinQuery((SpanQuery) innerBig, (SpanQuery) innerLittle);
}

View File

@ -67,12 +67,13 @@ public class TemplateQueryParser extends BaseQueryParserTemp {
* values. Handles both submitting the template as part of the request as
* well as referencing only the template name.
*
* @param parseContext
* @param context
* parse context containing the templated query.
*/
@Override
@Nullable
public Query parse(QueryParseContext parseContext) throws IOException {
public Query parse(QueryShardContext context) throws IOException {
QueryParseContext parseContext = context.parseContext();
XContentParser parser = parseContext.parser();
Template template = parse(parser, parseContext.parseFieldMatcher());
ExecutableScript executable = this.scriptService.executable(template, ScriptContext.Standard.SEARCH);
@ -80,9 +81,9 @@ public class TemplateQueryParser extends BaseQueryParserTemp {
BytesReference querySource = (BytesReference) executable.run();
try (XContentParser qSourceParser = XContentFactory.xContent(querySource).createParser(querySource)) {
final QueryParseContext context = new QueryParseContext(parseContext.index(), parseContext.indexQueryParserService());
context.reset(qSourceParser);
return context.parseInnerQuery();
final QueryShardContext contextCopy = new QueryShardContext(context.index(), context.indexQueryParserService());
contextCopy.reset(qSourceParser);
return contextCopy.parseContext().parseInnerQuery();
}
}

View File

@ -71,11 +71,11 @@ public class TermQueryBuilder extends BaseTermQueryBuilder<TermQueryBuilder> {
}
@Override
public Query doToQuery(QueryParseContext parseContext) throws IOException {
public Query doToQuery(QueryShardContext context) throws IOException {
Query query = null;
MappedFieldType mapper = parseContext.fieldMapper(this.fieldName);
MappedFieldType mapper = context.fieldMapper(this.fieldName);
if (mapper != null) {
query = mapper.termQuery(this.value, parseContext);
query = mapper.termQuery(this.value, context);
}
if (query == null) {
query = new TermQuery(new Term(this.fieldName, BytesRefs.toBytesRef(this.value)));

View File

@ -70,7 +70,8 @@ public class TermsQueryParser extends BaseQueryParserTemp {
}
@Override
public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
public Query parse(QueryShardContext context) throws IOException, QueryParsingException {
QueryParseContext parseContext = context.parseContext();
XContentParser parser = parseContext.parser();
String queryName = null;
@ -158,7 +159,7 @@ public class TermsQueryParser extends BaseQueryParserTemp {
throw new QueryParsingException(parseContext, "terms query requires a field name, followed by array of terms");
}
MappedFieldType fieldType = parseContext.fieldMapper(fieldName);
MappedFieldType fieldType = context.fieldMapper(fieldName);
if (fieldType != null) {
fieldName = fieldType.names().indexName();
}
@ -181,7 +182,7 @@ public class TermsQueryParser extends BaseQueryParserTemp {
Query query;
if (parseContext.isFilter()) {
if (fieldType != null) {
query = fieldType.termsQuery(terms, parseContext);
query = fieldType.termsQuery(terms, context);
} else {
BytesRef[] filterValues = new BytesRef[terms.size()];
for (int i = 0; i < filterValues.length; i++) {
@ -193,7 +194,7 @@ public class TermsQueryParser extends BaseQueryParserTemp {
BooleanQuery bq = new BooleanQuery();
for (Object term : terms) {
if (fieldType != null) {
bq.add(fieldType.termQuery(term, parseContext), Occur.SHOULD);
bq.add(fieldType.termQuery(term, context), Occur.SHOULD);
} else {
bq.add(new TermQuery(new Term(fieldName, BytesRefs.toBytesRef(term))), Occur.SHOULD);
}
@ -204,7 +205,7 @@ public class TermsQueryParser extends BaseQueryParserTemp {
query.setBoost(boost);
if (queryName != null) {
parseContext.addNamedQuery(queryName, query);
context.addNamedQuery(queryName, query);
}
return query;
}

View File

@ -67,10 +67,10 @@ public class TypeQueryBuilder extends AbstractQueryBuilder<TypeQueryBuilder> {
}
@Override
protected Query doToQuery(QueryParseContext parseContext) throws IOException {
protected Query doToQuery(QueryShardContext context) throws IOException {
Query filter;
//LUCENE 4 UPGRADE document mapper should use bytesref as well?
DocumentMapper documentMapper = parseContext.mapperService().documentMapper(type.utf8ToString());
DocumentMapper documentMapper = context.mapperService().documentMapper(type.utf8ToString());
if (documentMapper == null) {
filter = new TermQuery(new Term(TypeFieldMapper.NAME, type));
} else {

View File

@ -106,11 +106,11 @@ public class WildcardQueryBuilder extends AbstractQueryBuilder<WildcardQueryBuil
}
@Override
protected Query doToQuery(QueryParseContext parseContext) throws IOException {
protected Query doToQuery(QueryShardContext context) throws IOException {
String indexFieldName;
BytesRef valueBytes;
MappedFieldType fieldType = parseContext.fieldMapper(fieldName);
MappedFieldType fieldType = context.fieldMapper(fieldName);
if (fieldType != null) {
indexFieldName = fieldType.names().indexName();
valueBytes = fieldType.indexedValueForSearch(value);
@ -120,7 +120,7 @@ public class WildcardQueryBuilder extends AbstractQueryBuilder<WildcardQueryBuil
}
WildcardQuery query = new WildcardQuery(new Term(indexFieldName, valueBytes));
MultiTermQuery.RewriteMethod rewriteMethod = QueryParsers.parseRewriteMethod(parseContext.parseFieldMatcher(), rewrite, null);
MultiTermQuery.RewriteMethod rewriteMethod = QueryParsers.parseRewriteMethod(context.parseFieldMatcher(), rewrite, null);
QueryParsers.setRewriteMethod(query, rewriteMethod);
return query;
}

View File

@ -41,7 +41,8 @@ public class WrapperQueryParser extends BaseQueryParserTemp {
}
@Override
public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
public Query parse(QueryShardContext context) throws IOException, QueryParsingException {
QueryParseContext parseContext = context.parseContext();
XContentParser parser = parseContext.parser();
XContentParser.Token token = parser.nextToken();
@ -56,11 +57,11 @@ public class WrapperQueryParser extends BaseQueryParserTemp {
byte[] querySource = parser.binaryValue();
try (XContentParser qSourceParser = XContentFactory.xContent(querySource).createParser(querySource)) {
final QueryParseContext context = new QueryParseContext(parseContext.index(), parseContext.indexQueryParserService());
context.reset(qSourceParser);
Query result = context.parseInnerQuery();
final QueryShardContext contextCopy = new QueryShardContext(context.index(), context.indexQueryParserService());
contextCopy.reset(qSourceParser);
Query result = contextCopy.parseContext().parseInnerQuery();
parser.nextToken();
parseContext.combineNamedQueries(context);
context.combineNamedQueries(contextCopy);
return result;
}
}

View File

@ -43,7 +43,7 @@ import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.core.DateFieldMapper;
import org.elasticsearch.index.mapper.core.NumberFieldMapper;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.QueryParsingException;
import org.elasticsearch.index.query.functionscore.gauss.GaussDecayFunctionBuilder;
import org.elasticsearch.index.query.functionscore.gauss.GaussDecayFunctionParser;
@ -119,7 +119,7 @@ public abstract class DecayFunctionParser implements ScoreFunctionParser {
*
* */
@Override
public ScoreFunction parse(QueryParseContext parseContext, XContentParser parser) throws IOException, QueryParsingException {
public ScoreFunction parse(QueryShardContext context, XContentParser parser) throws IOException, QueryParsingException {
String currentFieldName;
XContentParser.Token token;
AbstractDistanceScoreFunction scoreFunction;
@ -132,7 +132,7 @@ public abstract class DecayFunctionParser implements ScoreFunctionParser {
if (token == XContentParser.Token.START_OBJECT) {
variableContent.copyCurrentStructure(parser);
fieldName = currentFieldName;
} else if (parseContext.parseFieldMatcher().match(currentFieldName, MULTI_VALUE_MODE)) {
} else if (context.parseFieldMatcher().match(currentFieldName, MULTI_VALUE_MODE)) {
multiValueMode = parser.text();
} else {
throw new ElasticsearchParseException("malformed score function score parameters.");
@ -142,34 +142,34 @@ public abstract class DecayFunctionParser implements ScoreFunctionParser {
throw new ElasticsearchParseException("malformed score function score parameters.");
}
XContentParser variableParser = XContentFactory.xContent(variableContent.string()).createParser(variableContent.string());
scoreFunction = parseVariable(fieldName, variableParser, parseContext, MultiValueMode.fromString(multiValueMode.toUpperCase(Locale.ROOT)));
scoreFunction = parseVariable(fieldName, variableParser, context, MultiValueMode.fromString(multiValueMode.toUpperCase(Locale.ROOT)));
return scoreFunction;
}
// parses origin and scale parameter for field "fieldName"
private AbstractDistanceScoreFunction parseVariable(String fieldName, XContentParser parser, QueryParseContext parseContext, MultiValueMode mode) throws IOException {
private AbstractDistanceScoreFunction parseVariable(String fieldName, XContentParser parser, QueryShardContext context, MultiValueMode mode) throws IOException {
// now, the field must exist, else we cannot read the value for
// the doc later
MappedFieldType fieldType = parseContext.fieldMapper(fieldName);
MappedFieldType fieldType = context.fieldMapper(fieldName);
if (fieldType == null) {
throw new QueryParsingException(parseContext, "unknown field [{}]", fieldName);
throw new QueryParsingException(context.parseContext(), "unknown field [{}]", fieldName);
}
// dates and time need special handling
parser.nextToken();
if (fieldType instanceof DateFieldMapper.DateFieldType) {
return parseDateVariable(fieldName, parser, parseContext, (DateFieldMapper.DateFieldType) fieldType, mode);
return parseDateVariable(fieldName, parser, context, (DateFieldMapper.DateFieldType) fieldType, mode);
} else if (fieldType instanceof GeoPointFieldMapper.GeoPointFieldType) {
return parseGeoVariable(fieldName, parser, parseContext, (GeoPointFieldMapper.GeoPointFieldType) fieldType, mode);
return parseGeoVariable(fieldName, parser, context, (GeoPointFieldMapper.GeoPointFieldType) fieldType, mode);
} else if (fieldType instanceof NumberFieldMapper.NumberFieldType) {
return parseNumberVariable(fieldName, parser, parseContext, (NumberFieldMapper.NumberFieldType) fieldType, mode);
return parseNumberVariable(fieldName, parser, context, (NumberFieldMapper.NumberFieldType) fieldType, mode);
} else {
throw new QueryParsingException(parseContext, "field [{}] is of type [{}], but only numeric types are supported.", fieldName, fieldType);
throw new QueryParsingException(context.parseContext(), "field [{}] is of type [{}], but only numeric types are supported.", fieldName, fieldType);
}
}
private AbstractDistanceScoreFunction parseNumberVariable(String fieldName, XContentParser parser, QueryParseContext parseContext,
private AbstractDistanceScoreFunction parseNumberVariable(String fieldName, XContentParser parser, QueryShardContext context,
NumberFieldMapper.NumberFieldType fieldType, MultiValueMode mode) throws IOException {
XContentParser.Token token;
String parameterName = null;
@ -199,11 +199,11 @@ public abstract class DecayFunctionParser implements ScoreFunctionParser {
if (!scaleFound || !refFound) {
throw new ElasticsearchParseException("both [{}] and [{}] must be set for numeric fields.", DecayFunctionBuilder.SCALE, DecayFunctionBuilder.ORIGIN);
}
IndexNumericFieldData numericFieldData = parseContext.getForField(fieldType);
IndexNumericFieldData numericFieldData = context.getForField(fieldType);
return new NumericFieldDataScoreFunction(origin, scale, decay, offset, getDecayFunction(), numericFieldData, mode);
}
private AbstractDistanceScoreFunction parseGeoVariable(String fieldName, XContentParser parser, QueryParseContext parseContext,
private AbstractDistanceScoreFunction parseGeoVariable(String fieldName, XContentParser parser, QueryShardContext context,
GeoPointFieldMapper.GeoPointFieldType fieldType, MultiValueMode mode) throws IOException {
XContentParser.Token token;
String parameterName = null;
@ -231,12 +231,12 @@ public abstract class DecayFunctionParser implements ScoreFunctionParser {
}
double scale = DistanceUnit.DEFAULT.parse(scaleString, DistanceUnit.DEFAULT);
double offset = DistanceUnit.DEFAULT.parse(offsetString, DistanceUnit.DEFAULT);
IndexGeoPointFieldData indexFieldData = parseContext.getForField(fieldType);
IndexGeoPointFieldData indexFieldData = context.getForField(fieldType);
return new GeoFieldDataScoreFunction(origin, scale, decay, offset, getDecayFunction(), indexFieldData, mode);
}
private AbstractDistanceScoreFunction parseDateVariable(String fieldName, XContentParser parser, QueryParseContext parseContext,
private AbstractDistanceScoreFunction parseDateVariable(String fieldName, XContentParser parser, QueryShardContext context,
DateFieldMapper.DateFieldType dateFieldType, MultiValueMode mode) throws IOException {
XContentParser.Token token;
String parameterName = null;
@ -271,7 +271,7 @@ public abstract class DecayFunctionParser implements ScoreFunctionParser {
double scale = val.getMillis();
val = TimeValue.parseTimeValue(offsetString, TimeValue.timeValueHours(24), getClass().getSimpleName() + ".offset");
double offset = val.getMillis();
IndexNumericFieldData numericFieldData = parseContext.getForField(dateFieldType);
IndexNumericFieldData numericFieldData = context.getForField(dateFieldType);
return new NumericFieldDataScoreFunction(origin, scale, decay, offset, getDecayFunction(), numericFieldData, mode);
}

View File

@ -82,7 +82,8 @@ public class FunctionScoreQueryParser implements QueryParser {
}
@Override
public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
public Query parse(QueryShardContext context) throws IOException, QueryParsingException {
QueryParseContext parseContext = context.parseContext();
XContentParser parser = parseContext.parser();
Query query = null;
@ -127,7 +128,7 @@ public class FunctionScoreQueryParser implements QueryParser {
String errorString = "already found [" + singleFunctionName + "], now encountering [functions].";
handleMisplacedFunctionsDeclaration(errorString, singleFunctionName);
}
currentFieldName = parseFiltersAndFunctions(parseContext, parser, filterFunctions, currentFieldName);
currentFieldName = parseFiltersAndFunctions(context, parser, filterFunctions, currentFieldName);
functionArrayFound = true;
} else {
ScoreFunction scoreFunction;
@ -138,7 +139,7 @@ public class FunctionScoreQueryParser implements QueryParser {
// we try to parse a score function. If there is no score
// function for the current field name,
// functionParserMapper.get() will throw an Exception.
scoreFunction = functionParserMapper.get(parseContext, currentFieldName).parse(parseContext, parser);
scoreFunction = functionParserMapper.get(parseContext, currentFieldName).parse(context, parser);
}
if (functionArrayFound) {
String errorString = "already found [functions] array, now encountering [" + currentFieldName + "].";
@ -191,7 +192,7 @@ public class FunctionScoreQueryParser implements QueryParser {
}
result.setBoost(boost);
if (queryName != null) {
parseContext.addNamedQuery(queryName, query);
context.addNamedQuery(queryName, query);
}
return result;
}
@ -204,8 +205,9 @@ public class FunctionScoreQueryParser implements QueryParser {
throw new ElasticsearchParseException("failed to parse [{}] query. [{}]", NAME, errorString);
}
private String parseFiltersAndFunctions(QueryParseContext parseContext, XContentParser parser,
private String parseFiltersAndFunctions(QueryShardContext context, XContentParser parser,
ArrayList<FiltersFunctionScoreQuery.FilterFunction> filterFunctions, String currentFieldName) throws IOException {
QueryParseContext parseContext = context.parseContext();
XContentParser.Token token;
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
Query filter = null;
@ -227,7 +229,7 @@ public class FunctionScoreQueryParser implements QueryParser {
// functionParserMapper throws exception if parser
// non-existent
ScoreFunctionParser functionParser = functionParserMapper.get(parseContext, currentFieldName);
scoreFunction = functionParser.parse(parseContext, parser);
scoreFunction = functionParser.parse(context, parser);
}
}
}
@ -275,9 +277,10 @@ public class FunctionScoreQueryParser implements QueryParser {
return cf;
}
//norelease to be removed once all queries are moved over to extend BaseQueryParser
@Override
public QueryBuilder fromXContent(QueryParseContext parseContext) throws IOException, QueryParsingException {
Query query = parse(parseContext);
Query query = parse(parseContext.shardContext());
return new QueryWrappingQueryBuilder(query);
}

View File

@ -21,14 +21,14 @@ package org.elasticsearch.index.query.functionscore;
import org.elasticsearch.common.lucene.search.function.ScoreFunction;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.QueryParsingException;
import java.io.IOException;
public interface ScoreFunctionParser {
ScoreFunction parse(QueryParseContext parseContext, XContentParser parser) throws IOException, QueryParsingException;
ScoreFunction parse(QueryShardContext context, XContentParser parser) throws IOException, QueryParsingException;
/**
* Returns the name of the function, for example "linear", "gauss" etc. This

View File

@ -19,14 +19,13 @@
package org.elasticsearch.index.query.functionscore.factor;
import org.elasticsearch.index.query.functionscore.ScoreFunctionParser;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.search.function.BoostScoreFunction;
import org.elasticsearch.common.lucene.search.function.ScoreFunction;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.QueryParsingException;
import org.elasticsearch.index.query.functionscore.ScoreFunctionParser;
import java.io.IOException;
@ -43,7 +42,7 @@ public class FactorParser implements ScoreFunctionParser {
}
@Override
public ScoreFunction parse(QueryParseContext parseContext, XContentParser parser) throws IOException, QueryParsingException {
public ScoreFunction parse(QueryShardContext context, XContentParser parser) throws IOException, QueryParsingException {
float boostFactor = parser.floatValue();
return new BoostScoreFunction(boostFactor);
}

View File

@ -24,8 +24,8 @@ import org.elasticsearch.common.lucene.search.function.FieldValueFactorFunction;
import org.elasticsearch.common.lucene.search.function.ScoreFunction;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryParsingException;
import org.elasticsearch.index.query.functionscore.ScoreFunctionParser;
@ -52,7 +52,8 @@ public class FieldValueFactorFunctionParser implements ScoreFunctionParser {
public static String[] NAMES = { "field_value_factor", "fieldValueFactor" };
@Override
public ScoreFunction parse(QueryParseContext parseContext, XContentParser parser) throws IOException, QueryParsingException {
public ScoreFunction parse(QueryShardContext context, XContentParser parser) throws IOException, QueryParsingException {
QueryParseContext parseContext = context.parseContext();
String currentFieldName = null;
String field = null;

View File

@ -27,8 +27,8 @@ import org.elasticsearch.common.lucene.search.function.RandomScoreFunction;
import org.elasticsearch.common.lucene.search.function.ScoreFunction;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryParsingException;
import org.elasticsearch.index.query.functionscore.ScoreFunctionParser;
@ -51,8 +51,8 @@ public class RandomScoreFunctionParser implements ScoreFunctionParser {
}
@Override
public ScoreFunction parse(QueryParseContext parseContext, XContentParser parser) throws IOException, QueryParsingException {
public ScoreFunction parse(QueryShardContext context, XContentParser parser) throws IOException, QueryParsingException {
QueryParseContext parseContext = context.parseContext();
int seed = -1;
String currentFieldName = null;
@ -90,7 +90,7 @@ public class RandomScoreFunctionParser implements ScoreFunctionParser {
}
if (seed == -1) {
seed = Longs.hashCode(parseContext.nowInMillis());
seed = Longs.hashCode(context.nowInMillis());
}
final ShardId shardId = SearchContext.current().indexShard().shardId();
final int salt = (shardId.index().name().hashCode() << 10) | shardId.id();

View File

@ -21,11 +21,11 @@
package org.elasticsearch.index.query.functionscore.script;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.search.function.ScoreFunction;
import org.elasticsearch.common.lucene.search.function.ScriptScoreFunction;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryParsingException;
import org.elasticsearch.index.query.functionscore.ScoreFunctionParser;
@ -58,7 +58,8 @@ public class ScriptScoreFunctionParser implements ScoreFunctionParser {
}
@Override
public ScoreFunction parse(QueryParseContext parseContext, XContentParser parser) throws IOException, QueryParsingException {
public ScoreFunction parse(QueryShardContext context, XContentParser parser) throws IOException, QueryParsingException {
QueryParseContext parseContext = context.parseContext();
ScriptParameterParser scriptParameterParser = new ScriptParameterParser();
Script script = null;
Map<String, Object> vars = null;
@ -100,7 +101,7 @@ public class ScriptScoreFunctionParser implements ScoreFunctionParser {
SearchScript searchScript;
try {
searchScript = parseContext.scriptService().search(parseContext.lookup(), script, ScriptContext.Standard.SEARCH);
searchScript = context.scriptService().search(context.lookup(), script, ScriptContext.Standard.SEARCH);
return new ScriptScoreFunction(script, searchScript);
} catch (Exception e) {
throw new QueryParsingException(parseContext, NAMES[0] + " the script could not be loaded", e);

View File

@ -22,6 +22,7 @@ package org.elasticsearch.index.query.support;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryShardException;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryParsingException;
import org.elasticsearch.search.fetch.fielddata.FieldDataFieldsParseElement;
@ -51,7 +52,7 @@ public class InnerHitsQueryParserHelper {
this.fieldDataFieldsParseElement = fieldDataFieldsParseElement;
}
public Tuple<String, SubSearchContext> parse(QueryParseContext parserContext) throws IOException, QueryParsingException {
public Tuple<String, SubSearchContext> parse(QueryParseContext parserContext) throws IOException, QueryShardException {
String fieldName = null;
XContentParser.Token token;
String innerHitName = null;

View File

@ -26,10 +26,10 @@ import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.QueryShardException;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryParsingException;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
@ -41,6 +41,7 @@ import java.io.IOException;
*/
public class NestedInnerQueryParseSupport {
protected final QueryShardContext shardContext;
protected final QueryParseContext parseContext;
private BytesReference source;
@ -60,12 +61,15 @@ public class NestedInnerQueryParseSupport {
private ObjectMapper parentObjectMapper;
public NestedInnerQueryParseSupport(XContentParser parser, SearchContext searchContext) {
parseContext = searchContext.queryParserService().getParseContext();
parseContext.reset(parser);
parseContext = searchContext.queryParserService().getShardContext().parseContext();
shardContext = searchContext.queryParserService().getShardContext();
shardContext.reset(parser);
}
public NestedInnerQueryParseSupport(QueryParseContext parseContext) {
this.parseContext = parseContext;
public NestedInnerQueryParseSupport(QueryShardContext context) {
this.parseContext = context.parseContext();
this.shardContext = context;
}
public void query() throws IOException {
@ -103,10 +107,10 @@ public class NestedInnerQueryParseSupport {
return innerQuery;
} else {
if (path == null) {
throw new QueryParsingException(parseContext, "[nested] requires 'path' field");
throw new QueryShardException(shardContext, "[nested] requires 'path' field");
}
if (!queryFound) {
throw new QueryParsingException(parseContext, "[nested] requires either 'query' or 'filter' field");
throw new QueryShardException(shardContext, "[nested] requires either 'query' or 'filter' field");
}
XContentParser old = parseContext.parser();
@ -132,10 +136,10 @@ public class NestedInnerQueryParseSupport {
return innerFilter;
} else {
if (path == null) {
throw new QueryParsingException(parseContext, "[nested] requires 'path' field");
throw new QueryShardException(shardContext, "[nested] requires 'path' field");
}
if (!filterFound) {
throw new QueryParsingException(parseContext, "[nested] requires either 'query' or 'filter' field");
throw new QueryShardException(shardContext, "[nested] requires either 'query' or 'filter' field");
}
setPathLevel();
@ -155,12 +159,12 @@ public class NestedInnerQueryParseSupport {
public void setPath(String path) {
this.path = path;
nestedObjectMapper = parseContext.getObjectMapper(path);
nestedObjectMapper = shardContext.getObjectMapper(path);
if (nestedObjectMapper == null) {
throw new QueryParsingException(parseContext, "[nested] failed to find nested object under path [" + path + "]");
throw new QueryShardException(shardContext, "[nested] failed to find nested object under path [" + path + "]");
}
if (!nestedObjectMapper.nested().isNested()) {
throw new QueryParsingException(parseContext, "[nested] nested object under path [" + path + "] is not of nested type");
throw new QueryShardException(shardContext, "[nested] nested object under path [" + path + "] is not of nested type");
}
}
@ -185,18 +189,18 @@ public class NestedInnerQueryParseSupport {
}
private void setPathLevel() {
ObjectMapper objectMapper = parseContext.nestedScope().getObjectMapper();
ObjectMapper objectMapper = shardContext.nestedScope().getObjectMapper();
if (objectMapper == null) {
parentFilter = parseContext.bitsetFilter(Queries.newNonNestedFilter());
parentFilter = shardContext.bitsetFilter(Queries.newNonNestedFilter());
} else {
parentFilter = parseContext.bitsetFilter(objectMapper.nestedTypeFilter());
parentFilter = shardContext.bitsetFilter(objectMapper.nestedTypeFilter());
}
childFilter = parseContext.bitsetFilter(nestedObjectMapper.nestedTypeFilter());
parentObjectMapper = parseContext.nestedScope().nextLevel(nestedObjectMapper);
childFilter = shardContext.bitsetFilter(nestedObjectMapper.nestedTypeFilter());
parentObjectMapper = shardContext.nestedScope().nextLevel(nestedObjectMapper);
}
private void resetPathLevel() {
parseContext.nestedScope().previousLevel();
shardContext.nestedScope().previousLevel();
}
}

View File

@ -25,6 +25,7 @@ import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.QueryParseContext;
import java.io.IOException;
@ -84,14 +85,14 @@ public abstract class XContentStructure {
BytesReference br = this.bytes();
assert br != null : "innerBytes must be set with .bytes(bytes) or .freeze() before parsing";
XContentParser innerParser = XContentHelper.createParser(br);
String[] origTypes = QueryParseContext.setTypesWithPrevious(types);
String[] origTypes = QueryShardContext.setTypesWithPrevious(types);
XContentParser old = parseContext.parser();
parseContext.parser(innerParser);
try {
return parseContext.parseInnerQuery();
} finally {
parseContext.parser(old);
QueryParseContext.setTypes(origTypes);
QueryShardContext.setTypes(origTypes);
}
}
@ -106,12 +107,12 @@ public abstract class XContentStructure {
public InnerQuery(QueryParseContext parseContext1, @Nullable String... types) throws IOException {
super(parseContext1);
if (types != null) {
String[] origTypes = QueryParseContext.setTypesWithPrevious(types);
String[] origTypes = QueryShardContext.setTypesWithPrevious(types);
try {
query = parseContext1.parseInnerQuery();
queryParsed = true;
} finally {
QueryParseContext.setTypes(origTypes);
QueryShardContext.setTypes(origTypes);
}
} else {
BytesReference innerBytes = XContentFactory.smileBuilder().copyCurrentStructure(parseContext1.parser()).bytes();

View File

@ -30,7 +30,7 @@ import org.elasticsearch.common.lucene.search.MultiPhrasePrefixQuery;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.support.QueryParsers;
import java.io.IOException;
@ -49,7 +49,7 @@ public class MatchQuery {
ALL
}
protected final QueryParseContext parseContext;
protected final QueryShardContext context;
protected String analyzer;
@ -75,8 +75,8 @@ public class MatchQuery {
protected Float commonTermsCutoff = null;
public MatchQuery(QueryParseContext parseContext) {
this.parseContext = parseContext;
public MatchQuery(QueryShardContext context) {
this.context = context;
}
public void setAnalyzer(String analyzer) {
@ -134,11 +134,11 @@ public class MatchQuery {
protected Analyzer getAnalyzer(MappedFieldType fieldType) {
if (this.analyzer == null) {
if (fieldType != null) {
return parseContext.getSearchAnalyzer(fieldType);
return context.getSearchAnalyzer(fieldType);
}
return parseContext.mapperService().searchAnalyzer();
return context.mapperService().searchAnalyzer();
} else {
Analyzer analyzer = parseContext.mapperService().analysisService().analyzer(this.analyzer);
Analyzer analyzer = context.mapperService().analysisService().analyzer(this.analyzer);
if (analyzer == null) {
throw new IllegalArgumentException("No analyzer found for [" + this.analyzer + "]");
}
@ -148,7 +148,7 @@ public class MatchQuery {
public Query parse(Type type, String fieldName, Object value) throws IOException {
final String field;
MappedFieldType fieldType = parseContext.fieldMapper(fieldName);
MappedFieldType fieldType = context.fieldMapper(fieldName);
if (fieldType != null) {
field = fieldType.names().indexName();
} else {
@ -157,7 +157,7 @@ public class MatchQuery {
if (fieldType != null && fieldType.useTermQueryWithQueryString() && !forceAnalyzeQueryString()) {
try {
return fieldType.termQuery(value, parseContext);
return fieldType.termQuery(value, context);
} catch (RuntimeException e) {
if (lenient) {
return null;

View File

@ -29,10 +29,9 @@ import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.query.MultiMatchQueryBuilder;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext;
import java.io.IOException;
import java.util.ArrayList;
@ -48,8 +47,8 @@ public class MultiMatchQuery extends MatchQuery {
this.groupTieBreaker = tieBreaker;
}
public MultiMatchQuery(QueryParseContext parseContext) {
super(parseContext);
public MultiMatchQuery(QueryShardContext context) {
super(context);
}
private Query parseAndApply(Type type, String fieldName, Object value, String minimumShouldMatch, Float boostValue) throws IOException {
@ -163,7 +162,7 @@ public class MultiMatchQuery extends MatchQuery {
List<Tuple<String, Float>> missing = new ArrayList<>();
for (Map.Entry<String, Float> entry : fieldNames.entrySet()) {
String name = entry.getKey();
MappedFieldType fieldType = parseContext.fieldMapper(name);
MappedFieldType fieldType = context.fieldMapper(name);
if (fieldType != null) {
Analyzer actualAnalyzer = getAnalyzer(fieldType);
name = fieldType.names().indexName();

View File

@ -35,7 +35,12 @@ import org.elasticsearch.index.aliases.IndexAliasesService;
import org.elasticsearch.index.cache.IndexCache;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.engine.IgnoreOnRecoveryEngineException;
import org.elasticsearch.index.mapper.*;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.MapperException;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.MapperUtils;
import org.elasticsearch.index.mapper.Mapping;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.query.IndexQueryParserService;
import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.index.query.QueryParsingException;

View File

@ -28,7 +28,7 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParsingException;
import org.elasticsearch.index.query.QueryShardException;
import java.io.IOException;
@ -115,7 +115,7 @@ public class GND extends NXYSignificanceHeuristic {
}
@Override
public SignificanceHeuristic parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher) throws IOException, QueryParsingException {
public SignificanceHeuristic parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher) throws IOException, QueryShardException {
String givenName = parser.currentName();
boolean backgroundIsSuperset = true;
XContentParser.Token token = parser.nextToken();

View File

@ -27,7 +27,7 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParsingException;
import org.elasticsearch.index.query.QueryShardException;
import java.io.IOException;
@ -108,7 +108,7 @@ public class JLHScore extends SignificanceHeuristic {
public static class JLHScoreParser implements SignificanceHeuristicParser {
@Override
public SignificanceHeuristic parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher) throws IOException, QueryParsingException {
public SignificanceHeuristic parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher) throws IOException, QueryShardException {
// move to the closing bracket
if (!parser.nextToken().equals(XContentParser.Token.END_OBJECT)) {
throw new ElasticsearchParseException("failed to parse [jhl] significance heuristic. expected an empty object, but found [{}] instead", parser.currentToken());

View File

@ -27,7 +27,7 @@ import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParsingException;
import org.elasticsearch.index.query.QueryShardException;
import java.io.IOException;
@ -138,7 +138,7 @@ public abstract class NXYSignificanceHeuristic extends SignificanceHeuristic {
public static abstract class NXYParser implements SignificanceHeuristicParser {
@Override
public SignificanceHeuristic parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher) throws IOException, QueryParsingException {
public SignificanceHeuristic parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher) throws IOException, QueryShardException {
String givenName = parser.currentName();
boolean includeNegatives = false;
boolean backgroundIsSuperset = true;

View File

@ -27,7 +27,7 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParsingException;
import org.elasticsearch.index.query.QueryShardException;
import java.io.IOException;
@ -77,7 +77,7 @@ public class PercentageScore extends SignificanceHeuristic {
public static class PercentageScoreParser implements SignificanceHeuristicParser {
@Override
public SignificanceHeuristic parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher) throws IOException, QueryParsingException {
public SignificanceHeuristic parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher) throws IOException, QueryShardException {
// move to the closing bracket
if (!parser.nextToken().equals(XContentParser.Token.END_OBJECT)) {
throw new ElasticsearchParseException("failed to parse [percentage] significance heuristic. expected an empty object, but got [{}] instead", parser.currentToken());

View File

@ -30,7 +30,7 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.logging.ESLoggerFactory;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParsingException;
import org.elasticsearch.index.query.QueryShardException;
import org.elasticsearch.script.*;
import org.elasticsearch.script.Script.ScriptField;
import org.elasticsearch.script.ScriptParameterParser.ScriptParameterValue;
@ -130,7 +130,7 @@ public class ScriptHeuristic extends SignificanceHeuristic {
}
@Override
public SignificanceHeuristic parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher) throws IOException, QueryParsingException {
public SignificanceHeuristic parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher) throws IOException, QueryShardException {
String heuristicName = parser.currentName();
Script script = null;
XContentParser.Token token;

View File

@ -24,7 +24,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.SearchParseElement;
import org.elasticsearch.search.fetch.fielddata.FieldDataFieldsParseElement;
import org.elasticsearch.search.fetch.script.ScriptFieldsParseElement;
@ -59,15 +59,15 @@ public class InnerHitsParseElement implements SearchParseElement {
@Override
public void parse(XContentParser parser, SearchContext searchContext) throws Exception {
QueryParseContext parseContext = searchContext.queryParserService().getParseContext();
parseContext.reset(parser);
Map<String, InnerHitsContext.BaseInnerHits> innerHitsMap = parseInnerHits(parser, parseContext, searchContext);
QueryShardContext context = searchContext.queryParserService().getShardContext();
context.reset(parser);
Map<String, InnerHitsContext.BaseInnerHits> innerHitsMap = parseInnerHits(parser, context, searchContext);
if (innerHitsMap != null) {
searchContext.innerHits(new InnerHitsContext(innerHitsMap));
}
}
private Map<String, InnerHitsContext.BaseInnerHits> parseInnerHits(XContentParser parser, QueryParseContext parseContext, SearchContext searchContext) throws Exception {
private Map<String, InnerHitsContext.BaseInnerHits> parseInnerHits(XContentParser parser, QueryShardContext context, SearchContext searchContext) throws Exception {
XContentParser.Token token;
Map<String, InnerHitsContext.BaseInnerHits> innerHitsMap = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
@ -79,7 +79,7 @@ public class InnerHitsParseElement implements SearchParseElement {
if (token != XContentParser.Token.START_OBJECT) {
throw new IllegalArgumentException("Inner hit definition for [" + innerHitName + " starts with a [" + token + "], expected a [" + XContentParser.Token.START_OBJECT + "].");
}
InnerHitsContext.BaseInnerHits innerHits = parseInnerHit(parser, parseContext, searchContext, innerHitName);
InnerHitsContext.BaseInnerHits innerHits = parseInnerHit(parser, context, searchContext, innerHitName);
if (innerHitsMap == null) {
innerHitsMap = new HashMap<>();
}
@ -88,7 +88,7 @@ public class InnerHitsParseElement implements SearchParseElement {
return innerHitsMap;
}
private InnerHitsContext.BaseInnerHits parseInnerHit(XContentParser parser, QueryParseContext parseContext, SearchContext searchContext, String innerHitName) throws Exception {
private InnerHitsContext.BaseInnerHits parseInnerHit(XContentParser parser, QueryShardContext context, SearchContext searchContext, String innerHitName) throws Exception {
XContentParser.Token token = parser.nextToken();
if (token != XContentParser.Token.FIELD_NAME) {
throw new IllegalArgumentException("Unexpected token " + token + " inside inner hit definition. Either specify [path] or [type] object");
@ -123,9 +123,9 @@ public class InnerHitsParseElement implements SearchParseElement {
final InnerHitsContext.BaseInnerHits innerHits;
if (nestedPath != null) {
innerHits = parseNested(parser, parseContext, searchContext, fieldName);
innerHits = parseNested(parser, context, searchContext, fieldName);
} else if (type != null) {
innerHits = parseParentChild(parser, parseContext, searchContext, fieldName);
innerHits = parseParentChild(parser, context, searchContext, fieldName);
} else {
throw new IllegalArgumentException("Either [path] or [type] must be defined");
}
@ -143,16 +143,16 @@ public class InnerHitsParseElement implements SearchParseElement {
return innerHits;
}
private InnerHitsContext.ParentChildInnerHits parseParentChild(XContentParser parser, QueryParseContext parseContext, SearchContext searchContext, String type) throws Exception {
ParseResult parseResult = parseSubSearchContext(searchContext, parseContext, parser);
private InnerHitsContext.ParentChildInnerHits parseParentChild(XContentParser parser, QueryShardContext context, SearchContext searchContext, String type) throws Exception {
ParseResult parseResult = parseSubSearchContext(searchContext, context, parser);
DocumentMapper documentMapper = searchContext.mapperService().documentMapper(type);
if (documentMapper == null) {
throw new IllegalArgumentException("type [" + type + "] doesn't exist");
}
return new InnerHitsContext.ParentChildInnerHits(parseResult.context(), parseResult.query(), parseResult.childInnerHits(), parseContext.mapperService(), documentMapper);
return new InnerHitsContext.ParentChildInnerHits(parseResult.context(), parseResult.query(), parseResult.childInnerHits(), context.mapperService(), documentMapper);
}
private InnerHitsContext.NestedInnerHits parseNested(XContentParser parser, QueryParseContext parseContext, SearchContext searchContext, String nestedPath) throws Exception {
private InnerHitsContext.NestedInnerHits parseNested(XContentParser parser, QueryShardContext context, SearchContext searchContext, String nestedPath) throws Exception {
ObjectMapper objectMapper = searchContext.getObjectMapper(nestedPath);
if (objectMapper == null) {
throw new IllegalArgumentException("path [" + nestedPath +"] doesn't exist");
@ -160,14 +160,14 @@ public class InnerHitsParseElement implements SearchParseElement {
if (objectMapper.nested().isNested() == false) {
throw new IllegalArgumentException("path [" + nestedPath +"] isn't nested");
}
ObjectMapper parentObjectMapper = parseContext.nestedScope().nextLevel(objectMapper);
ParseResult parseResult = parseSubSearchContext(searchContext, parseContext, parser);
parseContext.nestedScope().previousLevel();
ObjectMapper parentObjectMapper = context.nestedScope().nextLevel(objectMapper);
ParseResult parseResult = parseSubSearchContext(searchContext, context, parser);
context.nestedScope().previousLevel();
return new InnerHitsContext.NestedInnerHits(parseResult.context(), parseResult.query(), parseResult.childInnerHits(), parentObjectMapper, objectMapper);
}
private ParseResult parseSubSearchContext(SearchContext searchContext, QueryParseContext parseContext, XContentParser parser) throws Exception {
private ParseResult parseSubSearchContext(SearchContext searchContext, QueryShardContext context, XContentParser parser) throws Exception {
ParsedQuery query = null;
Map<String, InnerHitsContext.BaseInnerHits> childInnerHits = null;
SubSearchContext subSearchContext = new SubSearchContext(searchContext);
@ -178,10 +178,10 @@ public class InnerHitsParseElement implements SearchParseElement {
fieldName = parser.currentName();
} else if (token == XContentParser.Token.START_OBJECT) {
if ("query".equals(fieldName)) {
Query q = searchContext.queryParserService().parseInnerQuery(parseContext);
query = new ParsedQuery(q, parseContext.copyNamedQueries());
Query q = searchContext.queryParserService().parseInnerQuery(context);
query = new ParsedQuery(q, context.copyNamedQueries());
} else if ("inner_hits".equals(fieldName)) {
childInnerHits = parseInnerHits(parser, parseContext, searchContext);
childInnerHits = parseInnerHits(parser, context, searchContext);
} else {
parseCommonInnerHitOptions(parser, token, fieldName, subSearchContext, sortParseElement, sourceParseElement, highlighterParseElement, scriptFieldsParseElement, fieldDataFieldsParseElement);
}

View File

@ -41,7 +41,7 @@ import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.index.query.IndexQueryParserService;
import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.script.ScriptService;
@ -74,12 +74,12 @@ public abstract class SearchContext implements Releasable, HasContextAndHeaders
public static void setCurrent(SearchContext value) {
current.set(value);
QueryParseContext.setTypes(value.types());
QueryShardContext.setTypes(value.types());
}
public static void removeCurrent() {
current.remove();
QueryParseContext.removeTypes();
QueryShardContext.removeTypes();
}
public static SearchContext current() {

Some files were not shown because too many files have changed in this diff Show More