Consolidate query generation in QueryShardContext

Currently we have a lot of methods left in QueryShardContext that
take parsers or BytesReference arguments to do some xContent
parsing on the shard. While this still seems necessary in some cases
(e.g. percolation, phrase suggester), the shard context should only
be concerned with generating Lucene queries from QueryBuilders.

This change removes all of the parseX() methods in favour of two
public methods, toQuery(QueryBuilder) and toFilter(QueryBuilder), which
call the query builder's toQuery() or toFilter() method, and moves all
code required for parsing out to the respective callers.
Christoph Büscher 2016-05-03 16:16:29 +02:00
parent 8d3427b44d
commit 223d67df4a
10 changed files with 55 additions and 105 deletions
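
For context, a minimal sketch of the caller-side pattern this change establishes (the class and method names in the sketch are illustrative, not part of the commit): callers that still need to parse xContent create the parser themselves, obtain a QueryBuilder through QueryParseContext#parseInnerQueryBuilder, and hand it to QueryShardContext#toFilter or #toQuery, as the IndexService, PostFilterParseElement and PhraseSuggester hunks below do.

import java.io.IOException;

import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryShardContext;

// Illustrative helper, not part of this commit.
class CallerSideParsingSketch {

    // Parsing happens at the call site; the shard context only ever sees the
    // resulting QueryBuilder and turns it into a Lucene-backed ParsedQuery.
    static ParsedQuery parseFilter(BytesReference source, QueryShardContext shardContext) throws IOException {
        try (XContentParser parser = XContentFactory.xContent(source).createParser(source)) {
            QueryBuilder<?> filterBuilder = shardContext.newParseContext(parser).parseInnerQueryBuilder();
            return shardContext.toFilter(filterBuilder); // use toQuery(filterBuilder) in query context
        }
    }
}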


@@ -431,6 +431,7 @@ public final class IndexService extends AbstractIndexComponent implements IndexC
return nodeServicesProvider;
}
@Override
public IndexSettings getIndexSettings() {
return indexSettings;
}
@@ -598,18 +599,18 @@ public final class IndexService extends AbstractIndexComponent implements IndexC
}
}
private Query parse(AliasMetaData alias, QueryShardContext parseContext) {
private Query parse(AliasMetaData alias, QueryShardContext shardContext) {
if (alias.filter() == null) {
return null;
}
try {
byte[] filterSource = alias.filter().uncompressed();
try (XContentParser parser = XContentFactory.xContent(filterSource).createParser(filterSource)) {
ParsedQuery parsedFilter = parseContext.parseInnerFilter(parser);
ParsedQuery parsedFilter = shardContext.toFilter(shardContext.newParseContext(parser).parseInnerQueryBuilder());
return parsedFilter == null ? null : parsedFilter.query();
}
} catch (IOException ex) {
throw new AliasFilterParsingException(parseContext.index(), alias.getAlias(), "Invalid alias filter", ex);
throw new AliasFilterParsingException(shardContext.index(), alias.getAlias(), "Invalid alias filter", ex);
}
}
@@ -759,6 +760,7 @@ public final class IndexService extends AbstractIndexComponent implements IndexC
return scheduledFuture != null;
}
@Override
public final void run() {
try {
runInternal();
@@ -824,6 +826,7 @@ public final class IndexService extends AbstractIndexComponent implements IndexC
super(indexService, indexService.getIndexSettings().getTranslogSyncInterval());
}
@Override
protected String getThreadPool() {
return ThreadPool.Names.FLUSH;
}
@@ -849,6 +852,7 @@ public final class IndexService extends AbstractIndexComponent implements IndexC
indexService.maybeRefreshEngine();
}
@Override
protected String getThreadPool() {
return ThreadPool.Names.REFRESH;
}


@@ -224,7 +224,7 @@ public class PercolatorFieldMapper extends FieldMapper {
return queryBuilder.toQuery(context);
}
static QueryBuilder<?> parseQueryBuilder(QueryParseContext context, XContentLocation location) {
private static QueryBuilder parseQueryBuilder(QueryParseContext context, XContentLocation location) {
try {
return context.parseInnerQueryBuilder();
} catch (IOException e) {


@@ -239,14 +239,14 @@ public abstract class AbstractQueryBuilder<QB extends AbstractQueryBuilder<QB>>
return getWriteableName();
}
protected final void writeQueries(StreamOutput out, List<? extends QueryBuilder<?>> queries) throws IOException {
protected final static void writeQueries(StreamOutput out, List<? extends QueryBuilder> queries) throws IOException {
out.writeVInt(queries.size());
for (QueryBuilder<?> query : queries) {
out.writeNamedWriteable(query);
}
}
protected final List<QueryBuilder<?>> readQueries(StreamInput in) throws IOException {
protected final static List<QueryBuilder<?>> readQueries(StreamInput in) throws IOException {
List<QueryBuilder<?>> queries = new ArrayList<>();
int size = in.readVInt();
for (int i = 0; i < size; i++) {


@@ -26,7 +26,6 @@ import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.queryparser.classic.MapperQueryParser;
@@ -37,13 +36,9 @@ import org.apache.lucene.search.similarities.Similarity;
import org.elasticsearch.Version;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.AnalysisService;
@@ -292,84 +287,45 @@ public class QueryShardContext extends QueryRewriteContext {
return false;
}
public ParsedQuery parse(BytesReference source) {
XContentParser parser = null;
try {
parser = XContentFactory.xContent(source).createParser(source);
return innerParse(parser);
} catch (ParsingException e) {
throw e;
} catch (Exception e) {
throw new ParsingException(parser == null ? null : parser.getTokenLocation(), "Failed to parse", e);
} finally {
if (parser != null) {
parser.close();
}
}
}
public ParsedQuery parse(XContentParser parser) {
try {
return innerParse(parser);
} catch(IOException e) {
throw new ParsingException(parser.getTokenLocation(), "Failed to parse", e);
}
}
/**
* Parses an inner filter, returning null if the filter should be ignored.
*/
@Nullable
public ParsedQuery parseInnerFilter(XContentParser parser) throws IOException {
reset();
try {
Query filter = QueryBuilder.rewriteQuery(newParseContext(parser).parseInnerQueryBuilder(), this).toFilter(this);
public ParsedQuery toFilter(QueryBuilder queryBuilder) {
return toQuery(queryBuilder, q -> {
Query filter = q.toFilter(this);
if (filter == null) {
return null;
}
return new ParsedQuery(filter, copyNamedQueries());
} finally {
reset();
}
return filter;
});
}
public ParsedQuery toQuery(QueryBuilder queryBuilder) {
return toQuery(queryBuilder, q -> {
Query query = q.toQuery(this);
if (query == null) {
query = Queries.newMatchNoDocsQuery("No query left after rewrite.");
}
return query;
});
}
private ParsedQuery innerParse(XContentParser parser) throws IOException, QueryShardException {
@FunctionalInterface
private interface CheckedFunction<T, R> {
R apply(T t) throws IOException;
}
private ParsedQuery toQuery(QueryBuilder queryBuilder, CheckedFunction<QueryBuilder, Query> filterOrQuery) {
reset();
try {
Query query = parseInnerQuery(parser);
return new ParsedQuery(query, copyNamedQueries());
} finally {
reset();
}
}
public Query parseInnerQuery(XContentParser parser) throws IOException {
return toQuery(this.newParseContext(parser).parseInnerQueryBuilder(), this);
}
public ParsedQuery toQuery(QueryBuilder<?> queryBuilder) {
reset();
try {
Query query = toQuery(queryBuilder, this);
return new ParsedQuery(query, copyNamedQueries());
QueryBuilder rewriteQuery = QueryBuilder.rewriteQuery(queryBuilder, this);
return new ParsedQuery(filterOrQuery.apply(rewriteQuery), copyNamedQueries());
} catch(QueryShardException | ParsingException e ) {
throw e;
} catch(Exception e) {
throw new QueryShardException(this, "failed to create query: {}", e, queryBuilder);
} finally {
this.reset();
reset();
}
}
private static Query toQuery(final QueryBuilder<?> queryBuilder, final QueryShardContext context) throws IOException {
final Query query = QueryBuilder.rewriteQuery(queryBuilder, context).toQuery(context);
if (query == null) {
return Queries.newMatchNoDocsQuery("No query left after rewrite.");
}
return query;
}
public final Index index() {
return indexSettings.getIndex();
}


@@ -20,6 +20,7 @@ package org.elasticsearch.search.query;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.SearchParseElement;
import org.elasticsearch.search.internal.SearchContext;
@@ -30,7 +31,8 @@ public class PostFilterParseElement implements SearchParseElement {
@Override
public void parse(XContentParser parser, SearchContext context) throws Exception {
ParsedQuery postFilter = context.getQueryShardContext().parseInnerFilter(parser);
QueryShardContext shardContext = context.getQueryShardContext();
ParsedQuery postFilter = shardContext.toFilter(shardContext.newParseContext(parser).parseInnerQueryBuilder());
if (postFilter != null) {
context.parsedPostFilter(postFilter);
}


@@ -20,6 +20,7 @@
package org.elasticsearch.search.query;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.SearchParseElement;
import org.elasticsearch.search.internal.SearchContext;
@@ -30,6 +31,7 @@ public class QueryParseElement implements SearchParseElement {
@Override
public void parse(XContentParser parser, SearchContext context) throws Exception {
context.parsedQuery(context.getQueryShardContext().parse(parser));
QueryShardContext queryShardContext = context.getQueryShardContext();
context.parsedQuery(queryShardContext.toQuery(queryShardContext.newParseContext(parser).parseInnerQueryBuilder()));
}
}


@@ -120,20 +120,6 @@ public final class QueryRescorer implements Rescorer {
}
}
private static final ObjectParser<QueryRescoreContext, QueryShardContext> RESCORE_PARSER = new ObjectParser<>("query", null);
static {
RESCORE_PARSER.declareObject(QueryRescoreContext::setQuery, (p, c) -> c.parse(p).query(), new ParseField("rescore_query"));
RESCORE_PARSER.declareFloat(QueryRescoreContext::setQueryWeight, new ParseField("query_weight"));
RESCORE_PARSER.declareFloat(QueryRescoreContext::setRescoreQueryWeight, new ParseField("rescore_query_weight"));
RESCORE_PARSER.declareString(QueryRescoreContext::setScoreMode, new ParseField("score_mode"));
}
@Override
public RescoreSearchContext parse(XContentParser parser, QueryShardContext context) throws IOException {
return RESCORE_PARSER.parse(parser, new QueryRescoreContext(this), context);
}
private final static Comparator<ScoreDoc> SCORE_DOC_COMPARATOR = new Comparator<ScoreDoc>() {
@Override
public int compare(ScoreDoc o1, ScoreDoc o2) {


@@ -23,8 +23,6 @@ import org.apache.lucene.index.Term;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.TopDocs;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
@@ -65,16 +63,6 @@ public interface Rescorer {
public Explanation explain(int topLevelDocId, SearchContext context, RescoreSearchContext rescoreContext,
Explanation sourceExplanation) throws IOException;
/**
* Parses the {@link RescoreSearchContext} for this implementation
*
* @param parser the parser to read the context from
* @param context the current shard context
* @return the parsed {@link RescoreSearchContext}
* @throws IOException if an {@link IOException} occurs while parsing the context
*/
public RescoreSearchContext parse(XContentParser parser, QueryShardContext context) throws IOException;
/**
* Extracts all terms needed to execute this {@link Rescorer}. This method
* is executed in a distributed frequency collection roundtrip for


@@ -32,8 +32,11 @@ import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.script.CompiledScript;
import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.ScriptService;
@@ -115,11 +118,14 @@ public final class PhraseSuggester extends Suggester<PhraseSuggestionContext> {
// from the index for a correction, collateMatch is updated
final Map<String, Object> vars = suggestion.getCollateScriptParams();
vars.put(SUGGESTION_TEMPLATE_VAR_NAME, spare.toString());
ScriptService scriptService = suggestion.getShardContext().getScriptService();
QueryShardContext shardContext = suggestion.getShardContext();
ScriptService scriptService = shardContext.getScriptService();
final ExecutableScript executable = scriptService.executable(collateScript, vars);
final BytesReference querySource = (BytesReference) executable.run();
final ParsedQuery parsedQuery = suggestion.getShardContext().parse(querySource);
collateMatch = Lucene.exists(searcher, parsedQuery.query());
try (XContentParser parser = XContentFactory.xContent(querySource).createParser(querySource)) {
final ParsedQuery parsedQuery = shardContext.toQuery(shardContext.newParseContext(parser).parseInnerQueryBuilder());
collateMatch = Lucene.exists(searcher, parsedQuery.query());
}
}
if (!collateMatch && !collatePrune) {
continue;
@@ -142,7 +148,7 @@ public final class PhraseSuggester extends Suggester<PhraseSuggestionContext> {
return response;
}
private PhraseSuggestion.Entry buildResultEntry(SuggestionContext suggestion, CharsRefBuilder spare, double cutoffScore) {
private static PhraseSuggestion.Entry buildResultEntry(SuggestionContext suggestion, CharsRefBuilder spare, double cutoffScore) {
spare.copyUTF8Bytes(suggestion.getText());
return new PhraseSuggestion.Entry(new Text(spare.toString()), 0, spare.length(), cutoffScore);
}


@@ -593,6 +593,12 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>>
assertNotEquals("modifying the boost doesn't affect the corresponding lucene query", rewrite(firstLuceneQuery),
rewrite(thirdLuceneQuery));
}
// check that context#isFilter is not changed by invoking toQuery/rewrite
boolean filterFlag = randomBoolean();
context.setIsFilter(filterFlag);
rewriteQuery(firstQuery, context).toQuery(context);
assertEquals("isFilter should be unchanged", filterFlag, context.isFilter());
}
}