Remove QueryParseContext (#25486)

QueryParseContext is currently only used as a wrapper around an XContentParser, so
this change removes it entirely and updates the APIs that previously took a
QueryParseContext to accept the parser directly.
Christoph Büscher 2017-07-03 17:30:40 +02:00 committed by GitHub
parent 0e2cfc66bb
commit f576c987ce
118 changed files with 484 additions and 701 deletions
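
At call sites the change is mechanical: instead of wrapping the XContentParser in a QueryParseContext, the parser is handed to the API directly. A minimal before/after sketch, using the variable and method names from the RestSearchAction hunk further down in this diff (illustrative only, not an additional change):

// Before: wrap the parser before handing it to the search source parser.
QueryParseContext context = new QueryParseContext(requestContentParser);
searchRequest.source().parseXContent(context);

// After: pass the XContentParser straight through.
searchRequest.source().parseXContent(requestContentParser);

The aggregation builders below follow the same pattern; their ObjectParser instances switch their context type from QueryParseContext to Void since no extra state is needed.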

View File

@@ -24,13 +24,14 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentLocation;
- import org.elasticsearch.index.query.QueryParseContext;
+ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.rest.RestStatus;
import java.io.IOException;
/**
- * Exception that can be used when parsing queries with a given {@link QueryParseContext}.
+ * Exception that can be used when parsing queries with a given {@link
+ * XContentParser}.
* Can contain information about location of the error.
*/
public class ParsingException extends ElasticsearchException {
@@ -57,7 +58,7 @@ public class ParsingException extends ElasticsearchException {
/**
* This constructor is provided for use in unit tests where a
- * {@link QueryParseContext} may not be available
+ * {@link XContentParser} may not be available
*/
public ParsingException(int line, int col, String msg, Throwable cause) {
super(msg, cause);

View File

@@ -35,7 +35,6 @@ import org.elasticsearch.common.xcontent.NamedXContentRegistry.UnknownNamedObjec
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentLocation;
import org.elasticsearch.common.xcontent.XContentParser;
- import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.common.xcontent.XContentType;
import java.io.IOException;

View File

@@ -138,7 +138,7 @@ public class IdsQueryBuilder extends AbstractQueryBuilder<IdsQueryBuilder> {
builder.endObject();
}
- private static ObjectParser<IdsQueryBuilder, QueryParseContext> PARSER = new ObjectParser<>(NAME,
+ private static ObjectParser<IdsQueryBuilder, Void> PARSER = new ObjectParser<>(NAME,
() -> new IdsQueryBuilder());
static {

View File

@@ -53,7 +53,7 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeabl
public static final ParseField IGNORE_UNMAPPED = new ParseField("ignore_unmapped");
public static final QueryBuilder DEFAULT_INNER_HIT_QUERY = new MatchAllQueryBuilder();
- private static final ObjectParser<InnerHitBuilder, QueryParseContext> PARSER = new ObjectParser<>("inner_hits", InnerHitBuilder::new);
+ private static final ObjectParser<InnerHitBuilder, Void> PARSER = new ObjectParser<>("inner_hits", InnerHitBuilder::new);
static {
PARSER.declareString(InnerHitBuilder::setName, NAME_FIELD);

View File

@@ -58,7 +58,7 @@ public class MatchAllQueryBuilder extends AbstractQueryBuilder<MatchAllQueryBuil
builder.endObject();
}
- private static final ObjectParser<MatchAllQueryBuilder, QueryParseContext> PARSER = new ObjectParser<>(NAME, MatchAllQueryBuilder::new);
+ private static final ObjectParser<MatchAllQueryBuilder, Void> PARSER = new ObjectParser<>(NAME, MatchAllQueryBuilder::new);
static {
declareStandardFields(PARSER);

View File

@@ -1202,7 +1202,7 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQ
}
@Override
- protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws IOException {
+ protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) {
// TODO this needs heavy cleanups before we can rewrite it
return this;
}

View File

@@ -1,37 +0,0 @@
- /*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
- package org.elasticsearch.index.query;
- import org.elasticsearch.common.xcontent.XContentParser;
- import java.util.Objects;
- public class QueryParseContext {
- private final XContentParser parser;
- public QueryParseContext(XContentParser parser) {
- this.parser = Objects.requireNonNull(parser, "parser cannot be null");
- }
- public XContentParser parser() {
- return this.parser;
- }
- }

View File

@@ -95,13 +95,6 @@ public class QueryRewriteContext {
return xContentRegistry;
}
- /**
- * Returns a new {@link QueryParseContext} that wraps the provided parser.
- */
- public QueryParseContext newParseContext(XContentParser parser) {
- return new QueryParseContext(parser);
- }
public long nowInMillis() {
return nowInMillis.getAsLong();
}

View File

@@ -437,7 +437,7 @@ public class TermsQueryBuilder extends AbstractQueryBuilder<TermsQueryBuilder> {
}
@Override
- protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws IOException {
+ protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) {
if (this.termsLookup != null) {
TermsLookup termsLookup = new TermsLookup(this.termsLookup);
if (termsLookup.index() == null) { // TODO this should go away?

View File

@@ -31,7 +31,6 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContent;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
- import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.rest.BaseRestHandler;
import org.elasticsearch.rest.RestController;
import org.elasticsearch.rest.RestRequest;
@@ -91,8 +90,7 @@ public class RestMultiSearchAction extends BaseRestHandler {
parseMultiLineRequest(restRequest, multiRequest.indicesOptions(), allowExplicitIndex, (searchRequest, parser) -> {
try {
- final QueryParseContext queryParseContext = new QueryParseContext(parser);
- searchRequest.source(SearchSourceBuilder.fromXContent(queryParseContext));
+ searchRequest.source(SearchSourceBuilder.fromXContent(parser));
multiRequest.add(searchRequest);
} catch (IOException e) {
throw new ElasticsearchParseException("Exception when parsing search request", e);

View File

@@ -26,7 +26,6 @@ import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryBuilder;
- import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.rest.BaseRestHandler;
import org.elasticsearch.rest.RestController;
import org.elasticsearch.rest.RestRequest;
@@ -94,8 +93,7 @@ public class RestSearchAction extends BaseRestHandler {
}
searchRequest.indices(Strings.splitStringByCommaToArray(request.param("index")));
if (requestContentParser != null) {
- QueryParseContext context = new QueryParseContext(requestContentParser);
- searchRequest.source().parseXContent(context);
+ searchRequest.source().parseXContent(requestContentParser);
}
final int batchedReduceSize = request.paramAsInt("batched_reduce_size", searchRequest.getBatchedReduceSize());

View File

@@ -356,7 +356,7 @@ public class SearchModule {
registerAggregation(new AggregationSpec(FiltersAggregationBuilder.NAME, FiltersAggregationBuilder::new,
FiltersAggregationBuilder::parse).addResultReader(InternalFilters::new));
registerAggregation(new AggregationSpec(AdjacencyMatrixAggregationBuilder.NAME, AdjacencyMatrixAggregationBuilder::new,
- AdjacencyMatrixAggregationBuilder.getParser()).addResultReader(InternalAdjacencyMatrix::new));
+ AdjacencyMatrixAggregationBuilder::parse).addResultReader(InternalAdjacencyMatrix::new));
registerAggregation(new AggregationSpec(SamplerAggregationBuilder.NAME, SamplerAggregationBuilder::new,
SamplerAggregationBuilder::parse)
.addResultReader(InternalSampler.NAME, InternalSampler::new)
@@ -410,7 +410,7 @@ public class SearchModule {
if (false == transportClient) {
namedXContents.add(new NamedXContentRegistry.Entry(BaseAggregationBuilder.class, spec.getName(), (p, c) -> {
AggregatorFactories.AggParseContext context = (AggregatorFactories.AggParseContext) c;
- return spec.getParser().parse(context.name, context.queryParseContext);
+ return spec.getParser().parse(context.name, p);
}));
}
namedWriteables.add(
@@ -507,7 +507,7 @@ public class SearchModule {
if (false == transportClient) {
namedXContents.add(new NamedXContentRegistry.Entry(BaseAggregationBuilder.class, spec.getName(), (p, c) -> {
AggregatorFactories.AggParseContext context = (AggregatorFactories.AggParseContext) c;
- return spec.getParser().parse(context.name, context.queryParseContext);
+ return spec.getParser().parse(context.name, p);
}));
}
namedWriteables.add(

View File

@@ -23,7 +23,7 @@ import org.elasticsearch.action.support.ToXContentToBytes;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.NamedWriteable;
import org.elasticsearch.common.xcontent.ToXContent;
- import org.elasticsearch.index.query.QueryParseContext;
+ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
@@ -84,9 +84,10 @@ public abstract class AggregationBuilder
}
/**
- * Internal: Registers sub-factories with this factory. The sub-factory will be
- * responsible for the creation of sub-aggregators under the aggregator
- * created by this factory. This is only for use by {@link AggregatorFactories#parseAggregators(QueryParseContext)}.
+ * Internal: Registers sub-factories with this factory. The sub-factory will
+ * be responsible for the creation of sub-aggregators under the aggregator
+ * created by this factory. This is only for use by
+ * {@link AggregatorFactories#parseAggregators(XContentParser)}.
*
* @param subFactories
* The sub-factories

View File

@@ -25,7 +25,7 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.lease.Releasable;
- import org.elasticsearch.index.query.QueryParseContext;
+ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.bucket.BucketsAggregator;
import org.elasticsearch.search.internal.SearchContext;
@@ -50,11 +50,11 @@ public abstract class Aggregator extends BucketCollector implements Releasable {
* aggregation should be skipped (e.g. when trying to aggregate on unmapped fields).
*
* @param aggregationName The name of the aggregation
- * @param context The parse context
+ * @param parser The parser
* @return The resolved aggregator factory or {@code null} in case the aggregation should be skipped
* @throws java.io.IOException When parsing fails
*/
- AggregationBuilder parse(String aggregationName, QueryParseContext context) throws IOException;
+ AggregationBuilder parse(String aggregationName, XContentParser parser) throws IOException;
}
/**

View File

@@ -25,7 +25,6 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
- import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.search.aggregations.support.AggregationPath;
import org.elasticsearch.search.aggregations.support.AggregationPath.PathElement;
@@ -50,24 +49,18 @@ public class AggregatorFactories {
public static final Pattern VALID_AGG_NAME = Pattern.compile("[^\\[\\]>]+");
/**
- * Parses the aggregation request recursively generating aggregator factories in turn.
- *
- * @param parseContext The parse context.
- *
- * @return The parsed aggregator factories.
- *
- * @throws IOException When parsing fails for unknown reasons.
+ * Parses the aggregation request recursively generating aggregator
+ * factories in turn.
*/
- public static AggregatorFactories.Builder parseAggregators(QueryParseContext parseContext) throws IOException {
- return parseAggregators(parseContext, 0);
+ public static AggregatorFactories.Builder parseAggregators(XContentParser parser) throws IOException {
+ return parseAggregators(parser, 0);
}
- private static AggregatorFactories.Builder parseAggregators(QueryParseContext parseContext, int level) throws IOException {
+ private static AggregatorFactories.Builder parseAggregators(XContentParser parser, int level) throws IOException {
Matcher validAggMatcher = VALID_AGG_NAME.matcher("");
AggregatorFactories.Builder factories = new AggregatorFactories.Builder();
XContentParser.Token token = null;
- XContentParser parser = parseContext.parser();
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token != XContentParser.Token.FIELD_NAME) {
throw new ParsingException(parser.getTokenLocation(),
@@ -111,7 +104,7 @@ public class AggregatorFactories {
throw new ParsingException(parser.getTokenLocation(),
"Found two sub aggregation definitions under [" + aggregationName + "]");
}
- subFactories = parseAggregators(parseContext, level + 1);
+ subFactories = parseAggregators(parser, level + 1);
break;
default:
if (aggBuilder != null) {
@@ -120,7 +113,7 @@ public class AggregatorFactories {
}
aggBuilder = parser.namedObject(BaseAggregationBuilder.class, fieldName,
- new AggParseContext(aggregationName, parseContext));
+ new AggParseContext(aggregationName));
}
} else {
throw new ParsingException(parser.getTokenLocation(), "Expected [" + XContentParser.Token.START_OBJECT + "] under ["
@@ -156,11 +149,9 @@
*/
public static final class AggParseContext {
public final String name;
- public final QueryParseContext queryParseContext;
- public AggParseContext(String name, QueryParseContext queryParseContext) {
+ public AggParseContext(String name) {
this.name = name;
- this.queryParseContext = queryParseContext;
}
}

View File

@@ -28,7 +28,6 @@ import org.elasticsearch.common.util.Comparators;
import org.elasticsearch.common.xcontent.XContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
- import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation.Bucket;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregator;
import org.elasticsearch.search.aggregations.support.AggregationPath;
@@ -527,11 +526,10 @@ public class InternalOrder extends BucketOrder {
* Parse a {@link BucketOrder} from {@link XContent}.
*
* @param parser for parsing {@link XContent} that contains the order.
- * @param context parsing context.
* @return bucket ordering strategy
* @throws IOException on error a {@link XContent} parsing error.
*/
- public static BucketOrder parseOrderParam(XContentParser parser, QueryParseContext context) throws IOException {
+ public static BucketOrder parseOrderParam(XContentParser parser) throws IOException {
XContentParser.Token token;
String orderKey = null;
boolean orderAsc = false;

View File

@@ -24,12 +24,11 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
+ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.query.QueryBuilder;
- import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.search.aggregations.AbstractAggregationBuilder;
import org.elasticsearch.search.aggregations.AggregationBuilder;
- import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.bucket.adjacency.AdjacencyMatrixAggregator.KeyedFilter;
@@ -56,26 +55,23 @@ public class AdjacencyMatrixAggregationBuilder extends AbstractAggregationBuilde
private List<KeyedFilter> filters;
private String separator = DEFAULT_SEPARATOR;
- public static Aggregator.Parser getParser() {
- ObjectParser<AdjacencyMatrixAggregationBuilder, QueryParseContext> parser = new ObjectParser<>(
- AdjacencyMatrixAggregationBuilder.NAME);
- parser.declareString(AdjacencyMatrixAggregationBuilder::separator, SEPARATOR_FIELD);
- parser.declareNamedObjects(AdjacencyMatrixAggregationBuilder::setFiltersAsList, KeyedFilter.PARSER, FILTERS_FIELD);
- return new Aggregator.Parser() {
- @Override
- public AggregationBuilder parse(String aggregationName, QueryParseContext context) throws IOException {
- AdjacencyMatrixAggregationBuilder result = parser.parse(context.parser(),
- new AdjacencyMatrixAggregationBuilder(aggregationName), context);
- result.checkConsistency();
- return result;
- }
- };
- }
+ private static final ObjectParser<AdjacencyMatrixAggregationBuilder, Void> PARSER = new ObjectParser<>(
+ AdjacencyMatrixAggregationBuilder.NAME);
+ static {
+ PARSER.declareString(AdjacencyMatrixAggregationBuilder::separator, SEPARATOR_FIELD);
+ PARSER.declareNamedObjects(AdjacencyMatrixAggregationBuilder::setFiltersAsList, KeyedFilter.PARSER, FILTERS_FIELD);
+ }
+ public static AggregationBuilder parse(String aggregationName, XContentParser parser) throws IOException {
+ AdjacencyMatrixAggregationBuilder result = PARSER.parse(parser, new AdjacencyMatrixAggregationBuilder(aggregationName), null);
+ result.checkConsistency();
+ return result;
+ }
protected void checkConsistency() {
if ((filters == null) || (filters.size() == 0)) {
throw new IllegalStateException("[" + name + "] is missing : " + FILTERS_FIELD.getPreferredName() + " parameter");
}
}
@@ -96,17 +92,17 @@ public class AdjacencyMatrixAggregationBuilder extends AbstractAggregationBuilde
// the order of the filters in the request
Collections.sort(this.filters, Comparator.comparing(KeyedFilter::key));
}
/**
* @param name
* the name of this aggregation
*/
protected AdjacencyMatrixAggregationBuilder(String name) {
super(name);
}
/**
* @param name
* the name of this aggregation
@@ -171,8 +167,8 @@ public class AdjacencyMatrixAggregationBuilder extends AbstractAggregationBuilde
*/
public String separator() {
return separator;
}
/**
* Get the filters. This will be an unmodifiable map
*/
@@ -182,8 +178,8 @@ public class AdjacencyMatrixAggregationBuilder extends AbstractAggregationBuilde
result.put(keyedFilter.key(), keyedFilter.filter());
}
return result;
}
@Override
protected AggregatorFactory<?> doBuild(SearchContext context, AggregatorFactory<?> parent, Builder subFactoriesBuilder)

View File

@@ -32,7 +32,6 @@ import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryBuilder;
- import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.InternalAggregation;
@@ -68,11 +67,10 @@ public class AdjacencyMatrixAggregator extends BucketsAggregator {
private final String key;
private final QueryBuilder filter;
- public static final NamedObjectParser<KeyedFilter, QueryParseContext> PARSER =
- (XContentParser p, QueryParseContext c, String name) ->
+ public static final NamedObjectParser<KeyedFilter, Void> PARSER =
+ (XContentParser p, Void c, String name) ->
new KeyedFilter(name, parseInnerQueryBuilder(p));
public KeyedFilter(String key, QueryBuilder filter) {
if (key == null) {
throw new IllegalArgumentException("[key] must not be null");

View File

@@ -22,8 +22,8 @@ package org.elasticsearch.search.aggregations.bucket.filter;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
+ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryBuilder;
- import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.search.aggregations.AbstractAggregationBuilder;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.AggregatorFactory;
@@ -84,8 +84,8 @@ public class FilterAggregationBuilder extends AbstractAggregationBuilder<FilterA
return builder;
}
- public static FilterAggregationBuilder parse(String aggregationName, QueryParseContext context) throws IOException {
- QueryBuilder filter = parseInnerQueryBuilder(context.parser());
+ public static FilterAggregationBuilder parse(String aggregationName, XContentParser parser) throws IOException {
+ QueryBuilder filter = parseInnerQueryBuilder(parser);
return new FilterAggregationBuilder(aggregationName, filter);
}

View File

@@ -26,7 +26,6 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryBuilder;
- import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.search.aggregations.AbstractAggregationBuilder;
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
import org.elasticsearch.search.aggregations.AggregatorFactory;
@@ -202,9 +201,8 @@ public class FiltersAggregationBuilder extends AbstractAggregationBuilder<Filter
return builder;
}
- public static FiltersAggregationBuilder parse(String aggregationName, QueryParseContext context)
+ public static FiltersAggregationBuilder parse(String aggregationName, XContentParser parser)
throws IOException {
- XContentParser parser = context.parser();
List<FiltersAggregator.KeyedFilter> keyedFilters = null;
List<QueryBuilder> nonKeyedFilters = null;
@@ -238,7 +236,7 @@ public class FiltersAggregationBuilder extends AbstractAggregationBuilder<Filter
if (token == XContentParser.Token.FIELD_NAME) {
key = parser.currentName();
} else {
- QueryBuilder filter = parseInnerQueryBuilder(context.parser());
+ QueryBuilder filter = parseInnerQueryBuilder(parser);
keyedFilters.add(new FiltersAggregator.KeyedFilter(key, filter));
}
}
@@ -250,7 +248,7 @@ public class FiltersAggregationBuilder extends AbstractAggregationBuilder<Filter
if (FILTERS_FIELD.match(currentFieldName)) {
nonKeyedFilters = new ArrayList<>();
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
- QueryBuilder filter = parseInnerQueryBuilder(context.parser());
+ QueryBuilder filter = parseInnerQueryBuilder(parser);
nonKeyedFilters.add(filter);
}
} else {

View File

@@ -28,11 +28,11 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
+ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.AbstractSortingNumericDocValues;
import org.elasticsearch.index.fielddata.MultiGeoPointValues;
import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
- import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.bucket.BucketUtils;
@@ -53,7 +53,7 @@ public class GeoGridAggregationBuilder extends ValuesSourceAggregationBuilder<Va
public static final int DEFAULT_PRECISION = 5;
public static final int DEFAULT_MAX_NUM_CELLS = 10000;
- private static final ObjectParser<GeoGridAggregationBuilder, QueryParseContext> PARSER;
+ private static final ObjectParser<GeoGridAggregationBuilder, Void> PARSER;
static {
PARSER = new ObjectParser<>(GeoGridAggregationBuilder.NAME);
ValuesSourceParserHelper.declareGeoFields(PARSER, false, false);
@@ -62,8 +62,8 @@ public class GeoGridAggregationBuilder extends ValuesSourceAggregationBuilder<Va
PARSER.declareInt(GeoGridAggregationBuilder::shardSize, GeoHashGridParams.FIELD_SHARD_SIZE);
}
- public static GeoGridAggregationBuilder parse(String aggregationName, QueryParseContext context) throws IOException {
- return PARSER.parse(context.parser(), new GeoGridAggregationBuilder(aggregationName), context);
+ public static GeoGridAggregationBuilder parse(String aggregationName, XContentParser parser) throws IOException {
+ return PARSER.parse(parser, new GeoGridAggregationBuilder(aggregationName), null);
}
private int precision = DEFAULT_PRECISION;

View File

@@ -22,7 +22,7 @@ package org.elasticsearch.search.aggregations.bucket.global;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
- import org.elasticsearch.index.query.QueryParseContext;
+ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.AbstractAggregationBuilder;
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
import org.elasticsearch.search.aggregations.AggregatorFactory;
@@ -62,8 +62,8 @@ public class GlobalAggregationBuilder extends AbstractAggregationBuilder<GlobalA
return builder;
}
- public static GlobalAggregationBuilder parse(String aggregationName, QueryParseContext context) throws IOException {
- context.parser().nextToken();
+ public static GlobalAggregationBuilder parse(String aggregationName, XContentParser parser) throws IOException {
+ parser.nextToken();
return new GlobalAggregationBuilder(aggregationName);
}

View File

@@ -27,7 +27,6 @@ import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
- import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.BucketOrder;
@@ -81,7 +80,7 @@ public class DateHistogramAggregationBuilder
DATE_FIELD_UNITS = unmodifiableMap(dateFieldUnits);
}
- private static final ObjectParser<DateHistogramAggregationBuilder, QueryParseContext> PARSER;
+ private static final ObjectParser<DateHistogramAggregationBuilder, Void> PARSER;
static {
PARSER = new ObjectParser<>(DateHistogramAggregationBuilder.NAME);
ValuesSourceParserHelper.declareNumericFields(PARSER, true, true, true);
@@ -115,12 +114,12 @@ public class DateHistogramAggregationBuilder
PARSER.declareField(DateHistogramAggregationBuilder::extendedBounds, parser -> ExtendedBounds.PARSER.apply(parser, null),
ExtendedBounds.EXTENDED_BOUNDS_FIELD, ObjectParser.ValueType.OBJECT);
- PARSER.declareObjectArray(DateHistogramAggregationBuilder::order, InternalOrder.Parser::parseOrderParam,
+ PARSER.declareObjectArray(DateHistogramAggregationBuilder::order, (p, c) -> InternalOrder.Parser.parseOrderParam(p),
Histogram.ORDER_FIELD);
}
- public static DateHistogramAggregationBuilder parse(String aggregationName, QueryParseContext context) throws IOException {
- return PARSER.parse(context.parser(), new DateHistogramAggregationBuilder(aggregationName), context);
+ public static DateHistogramAggregationBuilder parse(String aggregationName, XContentParser parser) throws IOException {
+ return PARSER.parse(parser, new DateHistogramAggregationBuilder(aggregationName), null);
}
private long interval;

View File

@@ -24,7 +24,7 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
- import org.elasticsearch.index.query.QueryParseContext;
+ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.BucketOrder;
@@ -59,7 +59,7 @@ public class HistogramAggregationBuilder
EXTENDED_BOUNDS_PARSER.declareDouble((bounds, d) -> bounds[1] = d, new ParseField("max"));
}
- private static final ObjectParser<HistogramAggregationBuilder, QueryParseContext> PARSER;
+ private static final ObjectParser<HistogramAggregationBuilder, Void> PARSER;
static {
PARSER = new ObjectParser<>(HistogramAggregationBuilder.NAME);
ValuesSourceParserHelper.declareNumericFields(PARSER, true, true, false);
@@ -76,12 +76,12 @@ public class HistogramAggregationBuilder
histogram.extendedBounds(extendedBounds[0], extendedBounds[1]);
}, parser -> EXTENDED_BOUNDS_PARSER.apply(parser, null), ExtendedBounds.EXTENDED_BOUNDS_FIELD, ObjectParser.ValueType.OBJECT);
- PARSER.declareObjectArray(HistogramAggregationBuilder::order, InternalOrder.Parser::parseOrderParam,
+ PARSER.declareObjectArray(HistogramAggregationBuilder::order, (p, c) -> InternalOrder.Parser.parseOrderParam(p),
Histogram.ORDER_FIELD);
}
- public static HistogramAggregationBuilder parse(String aggregationName, QueryParseContext context) throws IOException {
- return PARSER.parse(context.parser(), new HistogramAggregationBuilder(aggregationName), context);
+ public static HistogramAggregationBuilder parse(String aggregationName, XContentParser parser) throws IOException {
+ return PARSER.parse(parser, new HistogramAggregationBuilder(aggregationName), null);
}
private double interval;

View File

@@ -23,7 +23,7 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
- import org.elasticsearch.index.query.QueryParseContext;
+ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValueType;
@@ -40,14 +40,14 @@ import java.io.IOException;
public class MissingAggregationBuilder extends ValuesSourceAggregationBuilder<ValuesSource, MissingAggregationBuilder> {
public static final String NAME = "missing";
- private static final ObjectParser<MissingAggregationBuilder, QueryParseContext> PARSER;
+ private static final ObjectParser<MissingAggregationBuilder, Void> PARSER;
static {
PARSER = new ObjectParser<>(MissingAggregationBuilder.NAME);
ValuesSourceParserHelper.declareAnyFields(PARSER, true, true);
}
- public static MissingAggregationBuilder parse(String aggregationName, QueryParseContext context) throws IOException {
- return PARSER.parse(context.parser(), new MissingAggregationBuilder(aggregationName, null), context);
+ public static MissingAggregationBuilder parse(String aggregationName, XContentParser parser) throws IOException {
+ return PARSER.parse(parser, new MissingAggregationBuilder(aggregationName, null), null);
}
public MissingAggregationBuilder(String name, ValueType targetValueType) {

View File

@@ -25,7 +25,6 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.ObjectMapper;
- import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.search.aggregations.AbstractAggregationBuilder;
import org.elasticsearch.search.aggregations.AggregationExecutionException;
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
@@ -104,12 +103,11 @@ public class NestedAggregationBuilder extends AbstractAggregationBuilder<NestedA
return builder;
}
- public static NestedAggregationBuilder parse(String aggregationName, QueryParseContext context) throws IOException {
+ public static NestedAggregationBuilder parse(String aggregationName, XContentParser parser) throws IOException {
String path = null;
XContentParser.Token token;
String currentFieldName = null;
- XContentParser parser = context.parser();
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();

View File

@@ -25,7 +25,6 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.ObjectMapper;
- import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.support.NestedScope;
import org.elasticsearch.search.SearchParseException;
import org.elasticsearch.search.aggregations.AbstractAggregationBuilder;
@@ -128,12 +127,11 @@ public class ReverseNestedAggregationBuilder extends AbstractAggregationBuilder<
return builder;
}
- public static ReverseNestedAggregationBuilder parse(String aggregationName, QueryParseContext context) throws IOException {
+ public static ReverseNestedAggregationBuilder parse(String aggregationName, XContentParser parser) throws IOException {
String path = null;
XContentParser.Token token;
String currentFieldName = null;
- XContentParser parser = context.parser();
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();

View File

@@ -22,7 +22,6 @@ package org.elasticsearch.search.aggregations.bucket.range;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
- import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
import org.elasticsearch.search.aggregations.AggregatorFactory;
@@ -37,7 +36,7 @@ import java.io.IOException;
public class RangeAggregationBuilder extends AbstractRangeBuilder<RangeAggregationBuilder, Range> {
public static final String NAME = "range";
- private static final ObjectParser<RangeAggregationBuilder, QueryParseContext> PARSER;
+ private static final ObjectParser<RangeAggregationBuilder, Void> PARSER;
static {
PARSER = new ObjectParser<>(RangeAggregationBuilder.NAME);
ValuesSourceParserHelper.declareNumericFields(PARSER, true, true, false);
@@ -47,14 +46,14 @@ public class RangeAggregationBuilder extends AbstractRangeBuilder<RangeAggregati
for (Range range : ranges) {
agg.addRange(range);
}
- }, RangeAggregationBuilder::parseRange, RangeAggregator.RANGES_FIELD);
+ }, (p, c) -> RangeAggregationBuilder.parseRange(p), RangeAggregator.RANGES_FIELD);
}
- public static AggregationBuilder parse(String aggregationName, QueryParseContext context) throws IOException {
- return PARSER.parse(context.parser(), new RangeAggregationBuilder(aggregationName), context);
+ public static AggregationBuilder parse(String aggregationName, XContentParser parser) throws IOException {
+ return PARSER.parse(parser, new RangeAggregationBuilder(aggregationName), null);
}
- private static Range parseRange(XContentParser parser, QueryParseContext context) throws IOException {
+ private static Range parseRange(XContentParser parser) throws IOException {
return Range.fromXContent(parser);
}

View File

@@ -22,7 +22,6 @@ package org.elasticsearch.search.aggregations.bucket.range.date;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
- import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
import org.elasticsearch.search.aggregations.AggregatorFactory;
@@ -40,7 +39,7 @@ import java.io.IOException;
public class DateRangeAggregationBuilder extends AbstractRangeBuilder<DateRangeAggregationBuilder, RangeAggregator.Range> {
public static final String NAME = "date_range";
- private static final ObjectParser<DateRangeAggregationBuilder, QueryParseContext> PARSER;
+ private static final ObjectParser<DateRangeAggregationBuilder, Void> PARSER;
static {
PARSER = new ObjectParser<>(DateRangeAggregationBuilder.NAME);
ValuesSourceParserHelper.declareNumericFields(PARSER, true, true, true);
@@ -50,14 +49,14 @@ public class DateRangeAggregationBuilder extends AbstractRangeBuilder<DateRangeA
for (Range range : ranges) {
agg.addRange(range);
}
- }, DateRangeAggregationBuilder::parseRange, RangeAggregator.RANGES_FIELD);
+ }, (p, c) -> DateRangeAggregationBuilder.parseRange(p), RangeAggregator.RANGES_FIELD);
}
- public static AggregationBuilder parse(String aggregationName, QueryParseContext context) throws IOException {
- return PARSER.parse(context.parser(), new DateRangeAggregationBuilder(aggregationName), context);
+ public static AggregationBuilder parse(String aggregationName, XContentParser parser) throws IOException {
+ return PARSER.parse(parser, new DateRangeAggregationBuilder(aggregationName), null);
}
- private static Range parseRange(XContentParser parser, QueryParseContext context) throws IOException {
+ private static Range parseRange(XContentParser parser) throws IOException {
return Range.fromXContent(parser);
}

View File

@ -30,7 +30,6 @@ import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParser.Token; import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder; import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.AggregatorFactory;
@ -54,7 +53,7 @@ public class GeoDistanceAggregationBuilder extends ValuesSourceAggregationBuilde
static final ParseField UNIT_FIELD = new ParseField("unit"); static final ParseField UNIT_FIELD = new ParseField("unit");
static final ParseField DISTANCE_TYPE_FIELD = new ParseField("distance_type"); static final ParseField DISTANCE_TYPE_FIELD = new ParseField("distance_type");
private static final ObjectParser<GeoDistanceAggregationBuilder, QueryParseContext> PARSER; private static final ObjectParser<GeoDistanceAggregationBuilder, Void> PARSER;
static { static {
PARSER = new ObjectParser<>(GeoDistanceAggregationBuilder.NAME); PARSER = new ObjectParser<>(GeoDistanceAggregationBuilder.NAME);
ValuesSourceParserHelper.declareGeoFields(PARSER, true, false); ValuesSourceParserHelper.declareGeoFields(PARSER, true, false);
@ -65,7 +64,7 @@ public class GeoDistanceAggregationBuilder extends ValuesSourceAggregationBuilde
for (Range range : ranges) { for (Range range : ranges) {
agg.addRange(range); agg.addRange(range);
} }
}, GeoDistanceAggregationBuilder::parseRange, RangeAggregator.RANGES_FIELD); }, (p, c) -> GeoDistanceAggregationBuilder.parseRange(p), RangeAggregator.RANGES_FIELD);
PARSER.declareField(GeoDistanceAggregationBuilder::unit, p -> DistanceUnit.fromString(p.text()), PARSER.declareField(GeoDistanceAggregationBuilder::unit, p -> DistanceUnit.fromString(p.text()),
UNIT_FIELD, ObjectParser.ValueType.STRING); UNIT_FIELD, ObjectParser.ValueType.STRING);
@ -77,8 +76,8 @@ public class GeoDistanceAggregationBuilder extends ValuesSourceAggregationBuilde
ORIGIN_FIELD, ObjectParser.ValueType.OBJECT_ARRAY_OR_STRING); ORIGIN_FIELD, ObjectParser.ValueType.OBJECT_ARRAY_OR_STRING);
} }
public static AggregationBuilder parse(String aggregationName, QueryParseContext context) throws IOException { public static AggregationBuilder parse(String aggregationName, XContentParser parser) throws IOException {
GeoDistanceAggregationBuilder builder = PARSER.parse(context.parser(), new GeoDistanceAggregationBuilder(aggregationName), context); GeoDistanceAggregationBuilder builder = PARSER.parse(parser, new GeoDistanceAggregationBuilder(aggregationName), null);
if (builder.origin() == null) { if (builder.origin() == null) {
throw new IllegalArgumentException("Aggregation [" + aggregationName + "] must define an [origin]."); throw new IllegalArgumentException("Aggregation [" + aggregationName + "] must define an [origin].");
} }
@ -116,7 +115,7 @@ public class GeoDistanceAggregationBuilder extends ValuesSourceAggregationBuilde
} }
} }
private static GeoPoint parseGeoPoint(XContentParser parser, QueryParseContext context) throws IOException { private static GeoPoint parseGeoPoint(XContentParser parser) throws IOException {
Token token = parser.currentToken(); Token token = parser.currentToken();
if (token == XContentParser.Token.VALUE_STRING) { if (token == XContentParser.Token.VALUE_STRING) {
GeoPoint point = new GeoPoint(); GeoPoint point = new GeoPoint();
@ -164,7 +163,7 @@ public class GeoDistanceAggregationBuilder extends ValuesSourceAggregationBuilde
throw new IllegalArgumentException("Unexpected token [" + token + "] while parsing geo point"); throw new IllegalArgumentException("Unexpected token [" + token + "] while parsing geo point");
} }
private static Range parseRange(XContentParser parser, QueryParseContext context) throws IOException { private static Range parseRange(XContentParser parser) throws IOException {
String fromAsStr = null; String fromAsStr = null;
String toAsStr = null; String toAsStr = null;
double from = 0.0; double from = 0.0;
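
The private helpers above (parseGeoPoint, parseRange) lose their unused context argument, so the ObjectParser declarations adapt them with a two-argument lambda that ignores the now-Void context instead of using a plain method reference. A small sketch of that adaptation, reusing names from the hunk above:

// before: the (parser, context) shape of the helper matched the declaration directly
//   PARSER.declareObjectArray(..., GeoDistanceAggregationBuilder::parseRange, RangeAggregator.RANGES_FIELD);
// after: the helper only needs the parser, so the second lambda parameter is ignored
PARSER.declareObjectArray((agg, ranges) -> ranges.forEach(agg::addRange),
        (p, c) -> parseRange(p), RangeAggregator.RANGES_FIELD);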


@ -30,7 +30,6 @@ import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParser.Token; import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.script.Script; import org.elasticsearch.script.Script;
import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder; import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
@ -61,7 +60,7 @@ public final class IpRangeAggregationBuilder
public static final String NAME = "ip_range"; public static final String NAME = "ip_range";
private static final ParseField MASK_FIELD = new ParseField("mask"); private static final ParseField MASK_FIELD = new ParseField("mask");
private static final ObjectParser<IpRangeAggregationBuilder, QueryParseContext> PARSER; private static final ObjectParser<IpRangeAggregationBuilder, Void> PARSER;
static { static {
PARSER = new ObjectParser<>(IpRangeAggregationBuilder.NAME); PARSER = new ObjectParser<>(IpRangeAggregationBuilder.NAME);
ValuesSourceParserHelper.declareBytesFields(PARSER, false, false); ValuesSourceParserHelper.declareBytesFields(PARSER, false, false);
@ -70,14 +69,14 @@ public final class IpRangeAggregationBuilder
PARSER.declareObjectArray((agg, ranges) -> { PARSER.declareObjectArray((agg, ranges) -> {
for (Range range : ranges) agg.addRange(range); for (Range range : ranges) agg.addRange(range);
}, IpRangeAggregationBuilder::parseRange, RangeAggregator.RANGES_FIELD); }, (p, c) -> IpRangeAggregationBuilder.parseRange(p), RangeAggregator.RANGES_FIELD);
} }
public static AggregationBuilder parse(String aggregationName, QueryParseContext context) throws IOException { public static AggregationBuilder parse(String aggregationName, XContentParser parser) throws IOException {
return PARSER.parse(context.parser(), new IpRangeAggregationBuilder(aggregationName), context); return PARSER.parse(parser, new IpRangeAggregationBuilder(aggregationName), null);
} }
private static Range parseRange(XContentParser parser, QueryParseContext context) throws IOException { private static Range parseRange(XContentParser parser) throws IOException {
String key = null; String key = null;
String from = null; String from = null;
String to = null; String to = null;
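
A side effect of dropping the wrapper is that these parse entry points can be driven by nothing but an XContentParser built from raw JSON, for example inside a test method. A hedged sketch; the createParser overload and the request body are assumptions, not taken from this commit:

String body = "{\"field\": \"ip\", \"ranges\": [ {\"to\": \"10.0.0.5\"}, {\"from\": \"10.0.0.5\"} ]}";
try (XContentParser parser = JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY, body)) {
    parser.nextToken(); // position the parser on the opening START_OBJECT
    AggregationBuilder builder = IpRangeAggregationBuilder.parse("ip_ranges", parser);
}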


@ -23,7 +23,7 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder; import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.AggregatorFactory;
@ -43,7 +43,7 @@ public class DiversifiedAggregationBuilder extends ValuesSourceAggregationBuilde
public static final int MAX_DOCS_PER_VALUE_DEFAULT = 1; public static final int MAX_DOCS_PER_VALUE_DEFAULT = 1;
private static final ObjectParser<DiversifiedAggregationBuilder, QueryParseContext> PARSER; private static final ObjectParser<DiversifiedAggregationBuilder, Void> PARSER;
static { static {
PARSER = new ObjectParser<>(DiversifiedAggregationBuilder.NAME); PARSER = new ObjectParser<>(DiversifiedAggregationBuilder.NAME);
ValuesSourceParserHelper.declareAnyFields(PARSER, true, false); ValuesSourceParserHelper.declareAnyFields(PARSER, true, false);
@ -52,8 +52,8 @@ public class DiversifiedAggregationBuilder extends ValuesSourceAggregationBuilde
PARSER.declareString(DiversifiedAggregationBuilder::executionHint, SamplerAggregator.EXECUTION_HINT_FIELD); PARSER.declareString(DiversifiedAggregationBuilder::executionHint, SamplerAggregator.EXECUTION_HINT_FIELD);
} }
public static AggregationBuilder parse(String aggregationName, QueryParseContext context) throws IOException { public static AggregationBuilder parse(String aggregationName, XContentParser parser) throws IOException {
return PARSER.parse(context.parser(), new DiversifiedAggregationBuilder(aggregationName), context); return PARSER.parse(parser, new DiversifiedAggregationBuilder(aggregationName), null);
} }
private int shardSize = SamplerAggregationBuilder.DEFAULT_SHARD_SAMPLE_SIZE; private int shardSize = SamplerAggregationBuilder.DEFAULT_SHARD_SAMPLE_SIZE;


@ -24,7 +24,6 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.search.aggregations.AbstractAggregationBuilder; import org.elasticsearch.search.aggregations.AbstractAggregationBuilder;
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder; import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.AggregatorFactory;
@ -86,12 +85,11 @@ public class SamplerAggregationBuilder extends AbstractAggregationBuilder<Sample
return builder; return builder;
} }
public static SamplerAggregationBuilder parse(String aggregationName, QueryParseContext context) throws IOException { public static SamplerAggregationBuilder parse(String aggregationName, XContentParser parser) throws IOException {
XContentParser.Token token; XContentParser.Token token;
String currentFieldName = null; String currentFieldName = null;
Integer shardSize = null; Integer shardSize = null;
XContentParser parser = context.parser();
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) { if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName(); currentFieldName = parser.currentName();
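
Builders that parse by hand rather than through an ObjectParser, like SamplerAggregationBuilder here, simply drop the context.parser() indirection and read from the method parameter. A condensed sketch of the resulting method; only the shard_size branch is shown and the field matching is reduced to a plain string comparison:

public static SamplerAggregationBuilder parse(String aggregationName, XContentParser parser) throws IOException {
    Integer shardSize = null;
    String currentFieldName = null;
    XContentParser.Token token;
    while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
        if (token == XContentParser.Token.FIELD_NAME) {
            currentFieldName = parser.currentName();
        } else if (token == XContentParser.Token.VALUE_NUMBER && "shard_size".equals(currentFieldName)) {
            shardSize = parser.intValue();
        }
        // the real method also rejects unknown fields and token types here
    }
    SamplerAggregationBuilder factory = new SamplerAggregationBuilder(aggregationName);
    if (shardSize != null) {
        factory.shardSize(shardSize); // (*) setter name assumed, not visible in this hunk
    }
    return factory;
}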


@ -24,8 +24,8 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ParseFieldRegistry; import org.elasticsearch.common.xcontent.ParseFieldRegistry;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder; import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
@ -62,43 +62,45 @@ public class SignificantTermsAggregationBuilder extends ValuesSourceAggregationB
static final SignificanceHeuristic DEFAULT_SIGNIFICANCE_HEURISTIC = new JLHScore(); static final SignificanceHeuristic DEFAULT_SIGNIFICANCE_HEURISTIC = new JLHScore();
public static Aggregator.Parser getParser(ParseFieldRegistry<SignificanceHeuristicParser> significanceHeuristicParserRegistry) { public static Aggregator.Parser getParser(ParseFieldRegistry<SignificanceHeuristicParser> significanceHeuristicParserRegistry) {
ObjectParser<SignificantTermsAggregationBuilder, QueryParseContext> parser = ObjectParser<SignificantTermsAggregationBuilder, Void> aggregationParser =
new ObjectParser<>(SignificantTermsAggregationBuilder.NAME); new ObjectParser<>(SignificantTermsAggregationBuilder.NAME);
ValuesSourceParserHelper.declareAnyFields(parser, true, true); ValuesSourceParserHelper.declareAnyFields(aggregationParser, true, true);
parser.declareInt(SignificantTermsAggregationBuilder::shardSize, TermsAggregationBuilder.SHARD_SIZE_FIELD_NAME); aggregationParser.declareInt(SignificantTermsAggregationBuilder::shardSize, TermsAggregationBuilder.SHARD_SIZE_FIELD_NAME);
parser.declareLong(SignificantTermsAggregationBuilder::minDocCount, TermsAggregationBuilder.MIN_DOC_COUNT_FIELD_NAME); aggregationParser.declareLong(SignificantTermsAggregationBuilder::minDocCount, TermsAggregationBuilder.MIN_DOC_COUNT_FIELD_NAME);
parser.declareLong(SignificantTermsAggregationBuilder::shardMinDocCount, TermsAggregationBuilder.SHARD_MIN_DOC_COUNT_FIELD_NAME); aggregationParser.declareLong(SignificantTermsAggregationBuilder::shardMinDocCount,
TermsAggregationBuilder.SHARD_MIN_DOC_COUNT_FIELD_NAME);
parser.declareInt(SignificantTermsAggregationBuilder::size, TermsAggregationBuilder.REQUIRED_SIZE_FIELD_NAME); aggregationParser.declareInt(SignificantTermsAggregationBuilder::size, TermsAggregationBuilder.REQUIRED_SIZE_FIELD_NAME);
parser.declareString(SignificantTermsAggregationBuilder::executionHint, TermsAggregationBuilder.EXECUTION_HINT_FIELD_NAME); aggregationParser.declareString(SignificantTermsAggregationBuilder::executionHint,
TermsAggregationBuilder.EXECUTION_HINT_FIELD_NAME);
parser.declareObject(SignificantTermsAggregationBuilder::backgroundFilter, aggregationParser.declareObject(SignificantTermsAggregationBuilder::backgroundFilter,
(p, context) -> parseInnerQueryBuilder(p), (p, context) -> parseInnerQueryBuilder(p),
SignificantTermsAggregationBuilder.BACKGROUND_FILTER); SignificantTermsAggregationBuilder.BACKGROUND_FILTER);
parser.declareField((b, v) -> b.includeExclude(IncludeExclude.merge(v, b.includeExclude())), aggregationParser.declareField((b, v) -> b.includeExclude(IncludeExclude.merge(v, b.includeExclude())),
IncludeExclude::parseInclude, IncludeExclude.INCLUDE_FIELD, ObjectParser.ValueType.OBJECT_ARRAY_OR_STRING); IncludeExclude::parseInclude, IncludeExclude.INCLUDE_FIELD, ObjectParser.ValueType.OBJECT_ARRAY_OR_STRING);
parser.declareField((b, v) -> b.includeExclude(IncludeExclude.merge(b.includeExclude(), v)), aggregationParser.declareField((b, v) -> b.includeExclude(IncludeExclude.merge(b.includeExclude(), v)),
IncludeExclude::parseExclude, IncludeExclude.EXCLUDE_FIELD, ObjectParser.ValueType.STRING_ARRAY); IncludeExclude::parseExclude, IncludeExclude.EXCLUDE_FIELD, ObjectParser.ValueType.STRING_ARRAY);
for (String name : significanceHeuristicParserRegistry.getNames()) { for (String name : significanceHeuristicParserRegistry.getNames()) {
parser.declareObject(SignificantTermsAggregationBuilder::significanceHeuristic, aggregationParser.declareObject(SignificantTermsAggregationBuilder::significanceHeuristic,
(p, context) -> { (p, context) -> {
SignificanceHeuristicParser significanceHeuristicParser = significanceHeuristicParserRegistry SignificanceHeuristicParser significanceHeuristicParser = significanceHeuristicParserRegistry
.lookupReturningNullIfNotFound(name); .lookupReturningNullIfNotFound(name);
return significanceHeuristicParser.parse(context); return significanceHeuristicParser.parse(p);
}, },
new ParseField(name)); new ParseField(name));
} }
return new Aggregator.Parser() { return new Aggregator.Parser() {
@Override @Override
public AggregationBuilder parse(String aggregationName, QueryParseContext context) throws IOException { public AggregationBuilder parse(String aggregationName, XContentParser parser) throws IOException {
return parser.parse(context.parser(), new SignificantTermsAggregationBuilder(aggregationName, null), context); return aggregationParser.parse(parser, new SignificantTermsAggregationBuilder(aggregationName, null), null);
} }
}; };
} }
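
Since Aggregator.Parser is now down to a single parse(String, XContentParser) method, the anonymous class returned above could equally be written as a lambda, assuming the interface keeps exactly one abstract method; the equivalent sketch:

return (aggregationName, p) ->
        aggregationParser.parse(p, new SignificantTermsAggregationBuilder(aggregationName, null), null);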


@ -25,9 +25,9 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ParseFieldRegistry; import org.elasticsearch.common.xcontent.ParseFieldRegistry;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.AbstractQueryBuilder; import org.elasticsearch.index.query.AbstractQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.search.aggregations.AbstractAggregationBuilder; import org.elasticsearch.search.aggregations.AbstractAggregationBuilder;
import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregationInitializationException; import org.elasticsearch.search.aggregations.AggregationInitializationException;
@ -70,55 +70,55 @@ public class SignificantTextAggregationBuilder extends AbstractAggregationBuilde
public static Aggregator.Parser getParser( public static Aggregator.Parser getParser(
ParseFieldRegistry<SignificanceHeuristicParser> significanceHeuristicParserRegistry) { ParseFieldRegistry<SignificanceHeuristicParser> significanceHeuristicParserRegistry) {
ObjectParser<SignificantTextAggregationBuilder, QueryParseContext> parser = new ObjectParser<>( ObjectParser<SignificantTextAggregationBuilder, Void> PARSER = new ObjectParser<>(
SignificantTextAggregationBuilder.NAME); SignificantTextAggregationBuilder.NAME);
parser.declareInt(SignificantTextAggregationBuilder::shardSize, PARSER.declareInt(SignificantTextAggregationBuilder::shardSize,
TermsAggregationBuilder.SHARD_SIZE_FIELD_NAME); TermsAggregationBuilder.SHARD_SIZE_FIELD_NAME);
parser.declareLong(SignificantTextAggregationBuilder::minDocCount, PARSER.declareLong(SignificantTextAggregationBuilder::minDocCount,
TermsAggregationBuilder.MIN_DOC_COUNT_FIELD_NAME); TermsAggregationBuilder.MIN_DOC_COUNT_FIELD_NAME);
parser.declareLong(SignificantTextAggregationBuilder::shardMinDocCount, PARSER.declareLong(SignificantTextAggregationBuilder::shardMinDocCount,
TermsAggregationBuilder.SHARD_MIN_DOC_COUNT_FIELD_NAME); TermsAggregationBuilder.SHARD_MIN_DOC_COUNT_FIELD_NAME);
parser.declareInt(SignificantTextAggregationBuilder::size, PARSER.declareInt(SignificantTextAggregationBuilder::size,
TermsAggregationBuilder.REQUIRED_SIZE_FIELD_NAME); TermsAggregationBuilder.REQUIRED_SIZE_FIELD_NAME);
parser.declareString(SignificantTextAggregationBuilder::fieldName, FIELD_NAME); PARSER.declareString(SignificantTextAggregationBuilder::fieldName, FIELD_NAME);
parser.declareStringArray(SignificantTextAggregationBuilder::sourceFieldNames, SOURCE_FIELDS_NAME); PARSER.declareStringArray(SignificantTextAggregationBuilder::sourceFieldNames, SOURCE_FIELDS_NAME);
parser.declareBoolean(SignificantTextAggregationBuilder::filterDuplicateText, PARSER.declareBoolean(SignificantTextAggregationBuilder::filterDuplicateText,
FILTER_DUPLICATE_TEXT_FIELD_NAME); FILTER_DUPLICATE_TEXT_FIELD_NAME);
parser.declareObject(SignificantTextAggregationBuilder::backgroundFilter, PARSER.declareObject(SignificantTextAggregationBuilder::backgroundFilter,
(p, context) -> AbstractQueryBuilder.parseInnerQueryBuilder(p), (p, context) -> AbstractQueryBuilder.parseInnerQueryBuilder(p),
SignificantTermsAggregationBuilder.BACKGROUND_FILTER); SignificantTermsAggregationBuilder.BACKGROUND_FILTER);
parser.declareField((b, v) -> b.includeExclude(IncludeExclude.merge(v, b.includeExclude())), PARSER.declareField((b, v) -> b.includeExclude(IncludeExclude.merge(v, b.includeExclude())),
IncludeExclude::parseInclude, IncludeExclude.INCLUDE_FIELD, IncludeExclude::parseInclude, IncludeExclude.INCLUDE_FIELD,
ObjectParser.ValueType.OBJECT_ARRAY_OR_STRING); ObjectParser.ValueType.OBJECT_ARRAY_OR_STRING);
parser.declareField((b, v) -> b.includeExclude(IncludeExclude.merge(b.includeExclude(), v)), PARSER.declareField((b, v) -> b.includeExclude(IncludeExclude.merge(b.includeExclude(), v)),
IncludeExclude::parseExclude, IncludeExclude.EXCLUDE_FIELD, IncludeExclude::parseExclude, IncludeExclude.EXCLUDE_FIELD,
ObjectParser.ValueType.STRING_ARRAY); ObjectParser.ValueType.STRING_ARRAY);
for (String name : significanceHeuristicParserRegistry.getNames()) { for (String name : significanceHeuristicParserRegistry.getNames()) {
parser.declareObject(SignificantTextAggregationBuilder::significanceHeuristic, PARSER.declareObject(SignificantTextAggregationBuilder::significanceHeuristic,
(p, context) -> { (p, context) -> {
SignificanceHeuristicParser significanceHeuristicParser = significanceHeuristicParserRegistry SignificanceHeuristicParser significanceHeuristicParser = significanceHeuristicParserRegistry
.lookupReturningNullIfNotFound(name); .lookupReturningNullIfNotFound(name);
return significanceHeuristicParser.parse(context); return significanceHeuristicParser.parse(p);
}, new ParseField(name)); }, new ParseField(name));
} }
return new Aggregator.Parser() { return new Aggregator.Parser() {
@Override @Override
public AggregationBuilder parse(String aggregationName, QueryParseContext context) public AggregationBuilder parse(String aggregationName, XContentParser parser)
throws IOException { throws IOException {
return parser.parse(context.parser(), return PARSER.parse(parser,
new SignificantTextAggregationBuilder(aggregationName, null), context); new SignificantTextAggregationBuilder(aggregationName, null), null);
} }
}; };
} }
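
The background_filter declaration above also shows the other recurring substitution in this commit: nested queries are read with the static AbstractQueryBuilder.parseInnerQueryBuilder(XContentParser) rather than through the removed context. As a usage sketch, assuming the parser is positioned on the opening brace of the filter object:

// consumes one complete query object, e.g. {"term": {"user": "kimchy"}}
QueryBuilder backgroundFilter = AbstractQueryBuilder.parseInnerQueryBuilder(parser);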


@ -26,7 +26,6 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardException; import org.elasticsearch.index.query.QueryShardException;
import java.io.IOException; import java.io.IOException;
@ -113,8 +112,7 @@ public class GND extends NXYSignificanceHeuristic {
} }
@Override @Override
public SignificanceHeuristic parse(QueryParseContext context) throws IOException, QueryShardException { public SignificanceHeuristic parse(XContentParser parser) throws IOException, QueryShardException {
XContentParser parser = context.parser();
String givenName = parser.currentName(); String givenName = parser.currentName();
boolean backgroundIsSuperset = true; boolean backgroundIsSuperset = true;
XContentParser.Token token = parser.nextToken(); XContentParser.Token token = parser.nextToken();


@ -26,7 +26,6 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardException; import org.elasticsearch.index.query.QueryShardException;
import java.io.IOException; import java.io.IOException;
@ -104,9 +103,8 @@ public class JLHScore extends SignificanceHeuristic {
return builder; return builder;
} }
public static SignificanceHeuristic parse(QueryParseContext context) public static SignificanceHeuristic parse(XContentParser parser)
throws IOException, QueryShardException { throws IOException, QueryShardException {
XContentParser parser = context.parser();
// move to the closing bracket // move to the closing bracket
if (!parser.nextToken().equals(XContentParser.Token.END_OBJECT)) { if (!parser.nextToken().equals(XContentParser.Token.END_OBJECT)) {
throw new ElasticsearchParseException( throw new ElasticsearchParseException(


@ -27,7 +27,6 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardException; import org.elasticsearch.index.query.QueryShardException;
import java.io.IOException; import java.io.IOException;
@ -152,9 +151,8 @@ public abstract class NXYSignificanceHeuristic extends SignificanceHeuristic {
public abstract static class NXYParser implements SignificanceHeuristicParser { public abstract static class NXYParser implements SignificanceHeuristicParser {
@Override @Override
public SignificanceHeuristic parse(QueryParseContext context) public SignificanceHeuristic parse(XContentParser parser)
throws IOException, QueryShardException { throws IOException, QueryShardException {
XContentParser parser = context.parser();
String givenName = parser.currentName(); String givenName = parser.currentName();
boolean includeNegatives = false; boolean includeNegatives = false;
boolean backgroundIsSuperset = true; boolean backgroundIsSuperset = true;


@ -26,7 +26,6 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardException; import org.elasticsearch.index.query.QueryShardException;
import java.io.IOException; import java.io.IOException;
@ -56,9 +55,8 @@ public class PercentageScore extends SignificanceHeuristic {
return builder; return builder;
} }
public static SignificanceHeuristic parse(QueryParseContext context) public static SignificanceHeuristic parse(XContentParser parser)
throws IOException, QueryShardException { throws IOException, QueryShardException {
XContentParser parser = context.parser();
// move to the closing bracket // move to the closing bracket
if (!parser.nextToken().equals(XContentParser.Token.END_OBJECT)) { if (!parser.nextToken().equals(XContentParser.Token.END_OBJECT)) {
throw new ElasticsearchParseException("failed to parse [percentage] significance heuristic. expected an empty object, but got [{}] instead", parser.currentToken()); throw new ElasticsearchParseException("failed to parse [percentage] significance heuristic. expected an empty object, but got [{}] instead", parser.currentToken());


@ -26,7 +26,6 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.QueryShardException; import org.elasticsearch.index.query.QueryShardException;
import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.ExecutableScript;
@ -149,9 +148,8 @@ public class ScriptHeuristic extends SignificanceHeuristic {
return Objects.equals(script, other.script); return Objects.equals(script, other.script);
} }
public static SignificanceHeuristic parse(QueryParseContext context) public static SignificanceHeuristic parse(XContentParser parser)
throws IOException, QueryShardException { throws IOException, QueryShardException {
XContentParser parser = context.parser();
String heuristicName = parser.currentName(); String heuristicName = parser.currentName();
Script script = null; Script script = null;
XContentParser.Token token; XContentParser.Token token;


@ -22,7 +22,6 @@ package org.elasticsearch.search.aggregations.bucket.significant.heuristics;
import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParseContext;
import java.io.IOException; import java.io.IOException;
@ -31,5 +30,5 @@ import java.io.IOException;
*/ */
@FunctionalInterface @FunctionalInterface
public interface SignificanceHeuristicParser { public interface SignificanceHeuristicParser {
SignificanceHeuristic parse(QueryParseContext context) throws IOException, ParsingException; SignificanceHeuristic parse(XContentParser parser) throws IOException, ParsingException;
} }
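
Because the interface above is a @FunctionalInterface whose single method now takes just the XContentParser, any static heuristic parser with a matching signature can be supplied as a plain method reference; for example, with JLHScore.parse from the earlier hunk:

SignificanceHeuristicParser jlhParser = JLHScore::parse;
SignificanceHeuristic heuristic = jlhParser.parse(parser); // reads the (empty) heuristic object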


@ -23,16 +23,16 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode; import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode;
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder; import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregator.BucketCountThresholds;
import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude;
import org.elasticsearch.search.aggregations.BucketOrder; import org.elasticsearch.search.aggregations.BucketOrder;
import org.elasticsearch.search.aggregations.InternalOrder; import org.elasticsearch.search.aggregations.InternalOrder;
import org.elasticsearch.search.aggregations.InternalOrder.CompoundOrder; import org.elasticsearch.search.aggregations.InternalOrder.CompoundOrder;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregator.BucketCountThresholds;
import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude;
import org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
@ -60,7 +60,7 @@ public class TermsAggregationBuilder extends ValuesSourceAggregationBuilder<Valu
public static final ParseField SHOW_TERM_DOC_COUNT_ERROR = new ParseField("show_term_doc_count_error"); public static final ParseField SHOW_TERM_DOC_COUNT_ERROR = new ParseField("show_term_doc_count_error");
public static final ParseField ORDER_FIELD = new ParseField("order"); public static final ParseField ORDER_FIELD = new ParseField("order");
private static final ObjectParser<TermsAggregationBuilder, QueryParseContext> PARSER; private static final ObjectParser<TermsAggregationBuilder, Void> PARSER;
static { static {
PARSER = new ObjectParser<>(TermsAggregationBuilder.NAME); PARSER = new ObjectParser<>(TermsAggregationBuilder.NAME);
ValuesSourceParserHelper.declareAnyFields(PARSER, true, true); ValuesSourceParserHelper.declareAnyFields(PARSER, true, true);
@ -82,7 +82,7 @@ public class TermsAggregationBuilder extends ValuesSourceAggregationBuilder<Valu
(p, c) -> SubAggCollectionMode.parse(p.text()), (p, c) -> SubAggCollectionMode.parse(p.text()),
SubAggCollectionMode.KEY, ObjectParser.ValueType.STRING); SubAggCollectionMode.KEY, ObjectParser.ValueType.STRING);
PARSER.declareObjectArray(TermsAggregationBuilder::order, InternalOrder.Parser::parseOrderParam, PARSER.declareObjectArray(TermsAggregationBuilder::order, (p, c) -> InternalOrder.Parser.parseOrderParam(p),
TermsAggregationBuilder.ORDER_FIELD); TermsAggregationBuilder.ORDER_FIELD);
PARSER.declareField((b, v) -> b.includeExclude(IncludeExclude.merge(v, b.includeExclude())), PARSER.declareField((b, v) -> b.includeExclude(IncludeExclude.merge(v, b.includeExclude())),
@ -92,8 +92,8 @@ public class TermsAggregationBuilder extends ValuesSourceAggregationBuilder<Valu
IncludeExclude::parseExclude, IncludeExclude.EXCLUDE_FIELD, ObjectParser.ValueType.STRING_ARRAY); IncludeExclude::parseExclude, IncludeExclude.EXCLUDE_FIELD, ObjectParser.ValueType.STRING_ARRAY);
} }
public static AggregationBuilder parse(String aggregationName, QueryParseContext context) throws IOException { public static AggregationBuilder parse(String aggregationName, XContentParser parser) throws IOException {
return PARSER.parse(context.parser(), new TermsAggregationBuilder(aggregationName, null), context); return PARSER.parse(parser, new TermsAggregationBuilder(aggregationName, null), null);
} }
private BucketOrder order = BucketOrder.compound(BucketOrder.count(false)); // automatically adds tie-breaker key asc order private BucketOrder order = BucketOrder.compound(BucketOrder.count(false)); // automatically adds tie-breaker key asc order
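
With these pieces in place the terms parser, like every other builder touched by this commit, is driven by nothing more than the aggregation name and a positioned parser. A hedged usage sketch; the JSON body is illustrative only:

// parser assumed to sit on the START_OBJECT of {"field": "genre", "size": 10}
AggregationBuilder terms = TermsAggregationBuilder.parse("genres", parser);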


@ -44,7 +44,6 @@ import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.DocValueFormat;
import java.io.IOException; import java.io.IOException;
@ -93,7 +92,7 @@ public class IncludeExclude implements Writeable, ToXContent {
} }
} }
public static IncludeExclude parseInclude(XContentParser parser, QueryParseContext context) throws IOException { public static IncludeExclude parseInclude(XContentParser parser) throws IOException {
XContentParser.Token token = parser.currentToken(); XContentParser.Token token = parser.currentToken();
if (token == XContentParser.Token.VALUE_STRING) { if (token == XContentParser.Token.VALUE_STRING) {
return new IncludeExclude(parser.text(), null); return new IncludeExclude(parser.text(), null);
@ -128,7 +127,7 @@ public class IncludeExclude implements Writeable, ToXContent {
} }
} }
public static IncludeExclude parseExclude(XContentParser parser, QueryParseContext context) throws IOException { public static IncludeExclude parseExclude(XContentParser parser) throws IOException {
XContentParser.Token token = parser.currentToken(); XContentParser.Token token = parser.currentToken();
if (token == XContentParser.Token.VALUE_STRING) { if (token == XContentParser.Token.VALUE_STRING) {
return new IncludeExclude(null, parser.text()); return new IncludeExclude(null, parser.text());
@ -170,6 +169,7 @@ public class IncludeExclude implements Writeable, ToXContent {
} }
} }
@Override
public boolean accept(long value) { public boolean accept(long value) {
return ((valids == null) || (valids.contains(value))) && ((invalids == null) || (!invalids.contains(value))); return ((valids == null) || (valids.contains(value))) && ((invalids == null) || (!invalids.contains(value)));
} }
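
parseInclude and parseExclude above are the same kind of context-free helpers, and the TermsAggregationBuilder declarations a few hunks up show how their results are folded onto the builder. A short sketch of that wiring, assuming a TermsAggregationBuilder named builder and a parser positioned on the include value:

IncludeExclude include = IncludeExclude.parseInclude(parser);
builder.includeExclude(IncludeExclude.merge(include, builder.includeExclude()));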


@ -23,7 +23,7 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder; import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.AggregatorFactory;
@ -41,14 +41,14 @@ import java.io.IOException;
public class AvgAggregationBuilder extends ValuesSourceAggregationBuilder.LeafOnly<ValuesSource.Numeric, AvgAggregationBuilder> { public class AvgAggregationBuilder extends ValuesSourceAggregationBuilder.LeafOnly<ValuesSource.Numeric, AvgAggregationBuilder> {
public static final String NAME = "avg"; public static final String NAME = "avg";
private static final ObjectParser<AvgAggregationBuilder, QueryParseContext> PARSER; private static final ObjectParser<AvgAggregationBuilder, Void> PARSER;
static { static {
PARSER = new ObjectParser<>(AvgAggregationBuilder.NAME); PARSER = new ObjectParser<>(AvgAggregationBuilder.NAME);
ValuesSourceParserHelper.declareNumericFields(PARSER, true, true, false); ValuesSourceParserHelper.declareNumericFields(PARSER, true, true, false);
} }
public static AggregationBuilder parse(String aggregationName, QueryParseContext context) throws IOException { public static AggregationBuilder parse(String aggregationName, XContentParser parser) throws IOException {
return PARSER.parse(context.parser(), new AvgAggregationBuilder(aggregationName), context); return PARSER.parse(parser, new AvgAggregationBuilder(aggregationName), null);
} }
public AvgAggregationBuilder(String name) { public AvgAggregationBuilder(String name) {


@ -24,7 +24,7 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder; import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.AggregatorFactory;
@ -47,7 +47,7 @@ public final class CardinalityAggregationBuilder
private static final ParseField REHASH = new ParseField("rehash").withAllDeprecated("no replacement - values will always be rehashed"); private static final ParseField REHASH = new ParseField("rehash").withAllDeprecated("no replacement - values will always be rehashed");
public static final ParseField PRECISION_THRESHOLD_FIELD = new ParseField("precision_threshold"); public static final ParseField PRECISION_THRESHOLD_FIELD = new ParseField("precision_threshold");
private static final ObjectParser<CardinalityAggregationBuilder, QueryParseContext> PARSER; private static final ObjectParser<CardinalityAggregationBuilder, Void> PARSER;
static { static {
PARSER = new ObjectParser<>(CardinalityAggregationBuilder.NAME); PARSER = new ObjectParser<>(CardinalityAggregationBuilder.NAME);
ValuesSourceParserHelper.declareAnyFields(PARSER, true, false); ValuesSourceParserHelper.declareAnyFields(PARSER, true, false);
@ -55,8 +55,8 @@ public final class CardinalityAggregationBuilder
PARSER.declareLong((b, v) -> {/*ignore*/}, REHASH); PARSER.declareLong((b, v) -> {/*ignore*/}, REHASH);
} }
public static AggregationBuilder parse(String aggregationName, QueryParseContext context) throws IOException { public static AggregationBuilder parse(String aggregationName, XContentParser parser) throws IOException {
return PARSER.parse(context.parser(), new CardinalityAggregationBuilder(aggregationName, null), context); return PARSER.parse(parser, new CardinalityAggregationBuilder(aggregationName, null), null);
} }
private Long precisionThreshold = null; private Long precisionThreshold = null;


@ -23,7 +23,7 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder; import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.AggregatorFactory;
@ -41,15 +41,15 @@ import java.util.Objects;
public class GeoBoundsAggregationBuilder extends ValuesSourceAggregationBuilder<ValuesSource.GeoPoint, GeoBoundsAggregationBuilder> { public class GeoBoundsAggregationBuilder extends ValuesSourceAggregationBuilder<ValuesSource.GeoPoint, GeoBoundsAggregationBuilder> {
public static final String NAME = "geo_bounds"; public static final String NAME = "geo_bounds";
private static final ObjectParser<GeoBoundsAggregationBuilder, QueryParseContext> PARSER; private static final ObjectParser<GeoBoundsAggregationBuilder, Void> PARSER;
static { static {
PARSER = new ObjectParser<>(GeoBoundsAggregationBuilder.NAME); PARSER = new ObjectParser<>(GeoBoundsAggregationBuilder.NAME);
ValuesSourceParserHelper.declareGeoFields(PARSER, false, false); ValuesSourceParserHelper.declareGeoFields(PARSER, false, false);
PARSER.declareBoolean(GeoBoundsAggregationBuilder::wrapLongitude, GeoBoundsAggregator.WRAP_LONGITUDE_FIELD); PARSER.declareBoolean(GeoBoundsAggregationBuilder::wrapLongitude, GeoBoundsAggregator.WRAP_LONGITUDE_FIELD);
} }
public static AggregationBuilder parse(String aggregationName, QueryParseContext context) throws IOException { public static AggregationBuilder parse(String aggregationName, XContentParser parser) throws IOException {
return PARSER.parse(context.parser(), new GeoBoundsAggregationBuilder(aggregationName), context); return PARSER.parse(parser, new GeoBoundsAggregationBuilder(aggregationName), null);
} }
private boolean wrapLongitude = true; private boolean wrapLongitude = true;


@ -23,7 +23,7 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder; import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.AggregatorFactory;
@ -41,14 +41,14 @@ public class GeoCentroidAggregationBuilder
extends ValuesSourceAggregationBuilder.LeafOnly<ValuesSource.GeoPoint, GeoCentroidAggregationBuilder> { extends ValuesSourceAggregationBuilder.LeafOnly<ValuesSource.GeoPoint, GeoCentroidAggregationBuilder> {
public static final String NAME = "geo_centroid"; public static final String NAME = "geo_centroid";
private static final ObjectParser<GeoCentroidAggregationBuilder, QueryParseContext> PARSER; private static final ObjectParser<GeoCentroidAggregationBuilder, Void> PARSER;
static { static {
PARSER = new ObjectParser<>(GeoCentroidAggregationBuilder.NAME); PARSER = new ObjectParser<>(GeoCentroidAggregationBuilder.NAME);
ValuesSourceParserHelper.declareGeoFields(PARSER, true, false); ValuesSourceParserHelper.declareGeoFields(PARSER, true, false);
} }
public static AggregationBuilder parse(String aggregationName, QueryParseContext context) throws IOException { public static AggregationBuilder parse(String aggregationName, XContentParser parser) throws IOException {
return PARSER.parse(context.parser(), new GeoCentroidAggregationBuilder(aggregationName), context); return PARSER.parse(parser, new GeoCentroidAggregationBuilder(aggregationName), null);
} }
public GeoCentroidAggregationBuilder(String name) { public GeoCentroidAggregationBuilder(String name) {


@ -23,7 +23,7 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder; import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.AggregatorFactory;
@ -41,14 +41,14 @@ import java.io.IOException;
public class MaxAggregationBuilder extends ValuesSourceAggregationBuilder.LeafOnly<ValuesSource.Numeric, MaxAggregationBuilder> { public class MaxAggregationBuilder extends ValuesSourceAggregationBuilder.LeafOnly<ValuesSource.Numeric, MaxAggregationBuilder> {
public static final String NAME = "max"; public static final String NAME = "max";
private static final ObjectParser<MaxAggregationBuilder, QueryParseContext> PARSER; private static final ObjectParser<MaxAggregationBuilder, Void> PARSER;
static { static {
PARSER = new ObjectParser<>(MaxAggregationBuilder.NAME); PARSER = new ObjectParser<>(MaxAggregationBuilder.NAME);
ValuesSourceParserHelper.declareNumericFields(PARSER, true, true, false); ValuesSourceParserHelper.declareNumericFields(PARSER, true, true, false);
} }
public static AggregationBuilder parse(String aggregationName, QueryParseContext context) throws IOException { public static AggregationBuilder parse(String aggregationName, XContentParser parser) throws IOException {
return PARSER.parse(context.parser(), new MaxAggregationBuilder(aggregationName), context); return PARSER.parse(parser, new MaxAggregationBuilder(aggregationName), null);
} }
public MaxAggregationBuilder(String name) { public MaxAggregationBuilder(String name) {


@ -23,7 +23,7 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder; import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.AggregatorFactory;
@ -42,14 +42,14 @@ import java.io.IOException;
public class MinAggregationBuilder extends ValuesSourceAggregationBuilder.LeafOnly<ValuesSource.Numeric, MinAggregationBuilder> { public class MinAggregationBuilder extends ValuesSourceAggregationBuilder.LeafOnly<ValuesSource.Numeric, MinAggregationBuilder> {
public static final String NAME = "min"; public static final String NAME = "min";
private static final ObjectParser<MinAggregationBuilder, QueryParseContext> PARSER; private static final ObjectParser<MinAggregationBuilder, Void> PARSER;
static { static {
PARSER = new ObjectParser<>(AvgAggregationBuilder.NAME); PARSER = new ObjectParser<>(AvgAggregationBuilder.NAME);
ValuesSourceParserHelper.declareNumericFields(PARSER, true, true, false); ValuesSourceParserHelper.declareNumericFields(PARSER, true, true, false);
} }
public static AggregationBuilder parse(String aggregationName, QueryParseContext context) throws IOException { public static AggregationBuilder parse(String aggregationName, XContentParser parser) throws IOException {
return PARSER.parse(context.parser(), new MinAggregationBuilder(aggregationName), context); return PARSER.parse(parser, new MinAggregationBuilder(aggregationName), null);
} }
public MinAggregationBuilder(String name) { public MinAggregationBuilder(String name) {


@ -24,7 +24,7 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder; import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.AggregatorFactory;
@ -53,7 +53,7 @@ public class PercentileRanksAggregationBuilder extends LeafOnly<ValuesSource.Num
Double compression; Double compression;
} }
private static final ObjectParser<TDigestOptions, QueryParseContext> TDIGEST_OPTIONS_PARSER = private static final ObjectParser<TDigestOptions, Void> TDIGEST_OPTIONS_PARSER =
new ObjectParser<>(PercentilesMethod.TDIGEST.getParseField().getPreferredName(), TDigestOptions::new); new ObjectParser<>(PercentilesMethod.TDIGEST.getParseField().getPreferredName(), TDigestOptions::new);
static { static {
TDIGEST_OPTIONS_PARSER.declareDouble((opts, compression) -> opts.compression = compression, new ParseField("compression")); TDIGEST_OPTIONS_PARSER.declareDouble((opts, compression) -> opts.compression = compression, new ParseField("compression"));
@ -63,14 +63,14 @@ public class PercentileRanksAggregationBuilder extends LeafOnly<ValuesSource.Num
Integer numberOfSigDigits; Integer numberOfSigDigits;
} }
private static final ObjectParser<HDROptions, QueryParseContext> HDR_OPTIONS_PARSER = private static final ObjectParser<HDROptions, Void> HDR_OPTIONS_PARSER =
new ObjectParser<>(PercentilesMethod.HDR.getParseField().getPreferredName(), HDROptions::new); new ObjectParser<>(PercentilesMethod.HDR.getParseField().getPreferredName(), HDROptions::new);
static { static {
HDR_OPTIONS_PARSER.declareInt((opts, numberOfSigDigits) -> opts.numberOfSigDigits = numberOfSigDigits, HDR_OPTIONS_PARSER.declareInt((opts, numberOfSigDigits) -> opts.numberOfSigDigits = numberOfSigDigits,
new ParseField("number_of_significant_value_digits")); new ParseField("number_of_significant_value_digits"));
} }
private static final ObjectParser<PercentileRanksAggregationBuilder, QueryParseContext> PARSER; private static final ObjectParser<PercentileRanksAggregationBuilder, Void> PARSER;
static { static {
PARSER = new ObjectParser<>(PercentileRanksAggregationBuilder.NAME); PARSER = new ObjectParser<>(PercentileRanksAggregationBuilder.NAME);
ValuesSourceParserHelper.declareNumericFields(PARSER, true, false, false); ValuesSourceParserHelper.declareNumericFields(PARSER, true, false, false);
@ -96,8 +96,8 @@ public class PercentileRanksAggregationBuilder extends LeafOnly<ValuesSource.Num
}, HDR_OPTIONS_PARSER::parse, PercentilesMethod.HDR.getParseField(), ObjectParser.ValueType.OBJECT); }, HDR_OPTIONS_PARSER::parse, PercentilesMethod.HDR.getParseField(), ObjectParser.ValueType.OBJECT);
} }
public static AggregationBuilder parse(String aggregationName, QueryParseContext context) throws IOException { public static AggregationBuilder parse(String aggregationName, XContentParser parser) throws IOException {
return PARSER.parse(context.parser(), new PercentileRanksAggregationBuilder(aggregationName), context); return PARSER.parse(parser, new PercentileRanksAggregationBuilder(aggregationName), null);
} }
private double[] values; private double[] values;
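
The percentile builders above push the same change one level down: the small tdigest/hdr option objects get their own ObjectParser<..., Void> and are attached through a method reference to that parser's parse method. A hedged sketch of the nesting; the option holder and the compression setter are assumptions:

static class TDigestOpts {
    Double compression;
}

private static final ObjectParser<TDigestOpts, Void> TDIGEST_OPTS_PARSER =
        new ObjectParser<>("tdigest", TDigestOpts::new);
static {
    TDIGEST_OPTS_PARSER.declareDouble((opts, c) -> opts.compression = c, new ParseField("compression"));

    // the nested parser is handed over as a method reference and also receives a null (Void) context
    PARSER.declareField((builder, opts) -> {
        if (opts.compression != null) {
            builder.compression(opts.compression); // (*) setter assumed, not visible in this hunk
        }
    }, TDIGEST_OPTS_PARSER::parse, PercentilesMethod.TDIGEST.getParseField(), ObjectParser.ValueType.OBJECT);
}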


@ -24,7 +24,7 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder; import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.AggregatorFactory;
@ -58,7 +58,7 @@ public class PercentilesAggregationBuilder extends LeafOnly<ValuesSource.Numeric
Double compression; Double compression;
} }
private static final ObjectParser<TDigestOptions, QueryParseContext> TDIGEST_OPTIONS_PARSER = private static final ObjectParser<TDigestOptions, Void> TDIGEST_OPTIONS_PARSER =
new ObjectParser<>(PercentilesMethod.TDIGEST.getParseField().getPreferredName(), TDigestOptions::new); new ObjectParser<>(PercentilesMethod.TDIGEST.getParseField().getPreferredName(), TDigestOptions::new);
static { static {
TDIGEST_OPTIONS_PARSER.declareDouble((opts, compression) -> opts.compression = compression, COMPRESSION_FIELD); TDIGEST_OPTIONS_PARSER.declareDouble((opts, compression) -> opts.compression = compression, COMPRESSION_FIELD);
@ -68,7 +68,7 @@ public class PercentilesAggregationBuilder extends LeafOnly<ValuesSource.Numeric
Integer numberOfSigDigits; Integer numberOfSigDigits;
} }
private static final ObjectParser<HDROptions, QueryParseContext> HDR_OPTIONS_PARSER = private static final ObjectParser<HDROptions, Void> HDR_OPTIONS_PARSER =
new ObjectParser<>(PercentilesMethod.HDR.getParseField().getPreferredName(), HDROptions::new); new ObjectParser<>(PercentilesMethod.HDR.getParseField().getPreferredName(), HDROptions::new);
static { static {
HDR_OPTIONS_PARSER.declareInt( HDR_OPTIONS_PARSER.declareInt(
@ -76,7 +76,7 @@ public class PercentilesAggregationBuilder extends LeafOnly<ValuesSource.Numeric
NUMBER_SIGNIFICANT_DIGITS_FIELD); NUMBER_SIGNIFICANT_DIGITS_FIELD);
} }
private static final ObjectParser<PercentilesAggregationBuilder, QueryParseContext> PARSER; private static final ObjectParser<PercentilesAggregationBuilder, Void> PARSER;
static { static {
PARSER = new ObjectParser<>(PercentilesAggregationBuilder.NAME); PARSER = new ObjectParser<>(PercentilesAggregationBuilder.NAME);
ValuesSourceParserHelper.declareNumericFields(PARSER, true, true, false); ValuesSourceParserHelper.declareNumericFields(PARSER, true, true, false);
@ -102,8 +102,8 @@ public class PercentilesAggregationBuilder extends LeafOnly<ValuesSource.Numeric
}, HDR_OPTIONS_PARSER::parse, PercentilesMethod.HDR.getParseField(), ObjectParser.ValueType.OBJECT); }, HDR_OPTIONS_PARSER::parse, PercentilesMethod.HDR.getParseField(), ObjectParser.ValueType.OBJECT);
} }
public static AggregationBuilder parse(String aggregationName, QueryParseContext context) throws IOException { public static AggregationBuilder parse(String aggregationName, XContentParser parser) throws IOException {
return PARSER.parse(context.parser(), new PercentilesAggregationBuilder(aggregationName), context); return PARSER.parse(parser, new PercentilesAggregationBuilder(aggregationName), null);
} }
private double[] percents = DEFAULT_PERCENTS; private double[] percents = DEFAULT_PERCENTS;


@ -25,11 +25,9 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.Script; import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.SearchScript; import org.elasticsearch.script.SearchScript;
import org.elasticsearch.search.aggregations.AbstractAggregationBuilder; import org.elasticsearch.search.aggregations.AbstractAggregationBuilder;
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder; import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
@ -39,7 +37,6 @@ import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException; import java.io.IOException;
import java.util.Map; import java.util.Map;
import java.util.Objects; import java.util.Objects;
import java.util.function.Function;
public class ScriptedMetricAggregationBuilder extends AbstractAggregationBuilder<ScriptedMetricAggregationBuilder> { public class ScriptedMetricAggregationBuilder extends AbstractAggregationBuilder<ScriptedMetricAggregationBuilder> {
public static final String NAME = "scripted_metric"; public static final String NAME = "scripted_metric";
@ -228,7 +225,7 @@ public class ScriptedMetricAggregationBuilder extends AbstractAggregationBuilder
return builder; return builder;
} }
public static ScriptedMetricAggregationBuilder parse(String aggregationName, QueryParseContext context) throws IOException { public static ScriptedMetricAggregationBuilder parse(String aggregationName, XContentParser parser) throws IOException {
Script initScript = null; Script initScript = null;
Script mapScript = null; Script mapScript = null;
Script combineScript = null; Script combineScript = null;
@ -237,7 +234,6 @@ public class ScriptedMetricAggregationBuilder extends AbstractAggregationBuilder
XContentParser.Token token; XContentParser.Token token;
String currentFieldName = null; String currentFieldName = null;
XContentParser parser = context.parser();
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) { if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName(); currentFieldName = parser.currentName();

View File

@ -23,7 +23,7 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder; import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.AggregatorFactory;
@ -41,14 +41,14 @@ import java.io.IOException;
public class StatsAggregationBuilder extends ValuesSourceAggregationBuilder.LeafOnly<ValuesSource.Numeric, StatsAggregationBuilder> { public class StatsAggregationBuilder extends ValuesSourceAggregationBuilder.LeafOnly<ValuesSource.Numeric, StatsAggregationBuilder> {
public static final String NAME = "stats"; public static final String NAME = "stats";
private static final ObjectParser<StatsAggregationBuilder, QueryParseContext> PARSER; private static final ObjectParser<StatsAggregationBuilder, Void> PARSER;
static { static {
PARSER = new ObjectParser<>(StatsAggregationBuilder.NAME); PARSER = new ObjectParser<>(StatsAggregationBuilder.NAME);
ValuesSourceParserHelper.declareNumericFields(PARSER, true, true, false); ValuesSourceParserHelper.declareNumericFields(PARSER, true, true, false);
} }
public static AggregationBuilder parse(String aggregationName, QueryParseContext context) throws IOException { public static AggregationBuilder parse(String aggregationName, XContentParser parser) throws IOException {
return PARSER.parse(context.parser(), new StatsAggregationBuilder(aggregationName), context); return PARSER.parse(parser, new StatsAggregationBuilder(aggregationName), null);
} }
public StatsAggregationBuilder(String name) { public StatsAggregationBuilder(String name) {

View File

@ -23,7 +23,7 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder; import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.AggregatorFactory;
@ -43,15 +43,15 @@ public class ExtendedStatsAggregationBuilder
extends ValuesSourceAggregationBuilder.LeafOnly<ValuesSource.Numeric, ExtendedStatsAggregationBuilder> { extends ValuesSourceAggregationBuilder.LeafOnly<ValuesSource.Numeric, ExtendedStatsAggregationBuilder> {
public static final String NAME = "extended_stats"; public static final String NAME = "extended_stats";
private static final ObjectParser<ExtendedStatsAggregationBuilder, QueryParseContext> PARSER; private static final ObjectParser<ExtendedStatsAggregationBuilder, Void> PARSER;
static { static {
PARSER = new ObjectParser<>(ExtendedStatsAggregationBuilder.NAME); PARSER = new ObjectParser<>(ExtendedStatsAggregationBuilder.NAME);
ValuesSourceParserHelper.declareNumericFields(PARSER, true, true, false); ValuesSourceParserHelper.declareNumericFields(PARSER, true, true, false);
PARSER.declareDouble(ExtendedStatsAggregationBuilder::sigma, ExtendedStatsAggregator.SIGMA_FIELD); PARSER.declareDouble(ExtendedStatsAggregationBuilder::sigma, ExtendedStatsAggregator.SIGMA_FIELD);
} }
public static AggregationBuilder parse(String aggregationName, QueryParseContext context) throws IOException { public static AggregationBuilder parse(String aggregationName, XContentParser parser) throws IOException {
return PARSER.parse(context.parser(), new ExtendedStatsAggregationBuilder(aggregationName), context); return PARSER.parse(parser, new ExtendedStatsAggregationBuilder(aggregationName), null);
} }
private double sigma = 2.0; private double sigma = 2.0;

View File

@ -23,7 +23,7 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder; import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.AggregatorFactory;
@ -41,14 +41,14 @@ import java.io.IOException;
public class SumAggregationBuilder extends ValuesSourceAggregationBuilder.LeafOnly<ValuesSource.Numeric, SumAggregationBuilder> { public class SumAggregationBuilder extends ValuesSourceAggregationBuilder.LeafOnly<ValuesSource.Numeric, SumAggregationBuilder> {
public static final String NAME = "sum"; public static final String NAME = "sum";
private static final ObjectParser<SumAggregationBuilder, QueryParseContext> PARSER; private static final ObjectParser<SumAggregationBuilder, Void> PARSER;
static { static {
PARSER = new ObjectParser<>(SumAggregationBuilder.NAME); PARSER = new ObjectParser<>(SumAggregationBuilder.NAME);
ValuesSourceParserHelper.declareNumericFields(PARSER, true, true, false); ValuesSourceParserHelper.declareNumericFields(PARSER, true, true, false);
} }
public static AggregationBuilder parse(String aggregationName, QueryParseContext context) throws IOException { public static AggregationBuilder parse(String aggregationName, XContentParser parser) throws IOException {
return PARSER.parse(context.parser(), new SumAggregationBuilder(aggregationName), context); return PARSER.parse(parser, new SumAggregationBuilder(aggregationName), null);
} }
public SumAggregationBuilder(String name) { public SumAggregationBuilder(String name) {

View File

@ -26,7 +26,6 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.script.Script; import org.elasticsearch.script.Script;
import org.elasticsearch.script.SearchScript; import org.elasticsearch.script.SearchScript;
@ -595,11 +594,10 @@ public class TopHitsAggregationBuilder extends AbstractAggregationBuilder<TopHit
return builder; return builder;
} }
public static TopHitsAggregationBuilder parse(String aggregationName, QueryParseContext context) throws IOException { public static TopHitsAggregationBuilder parse(String aggregationName, XContentParser parser) throws IOException {
TopHitsAggregationBuilder factory = new TopHitsAggregationBuilder(aggregationName); TopHitsAggregationBuilder factory = new TopHitsAggregationBuilder(aggregationName);
XContentParser.Token token; XContentParser.Token token;
String currentFieldName = null; String currentFieldName = null;
XContentParser parser = context.parser();
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) { if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName(); currentFieldName = parser.currentName();
@ -615,10 +613,10 @@ public class TopHitsAggregationBuilder extends AbstractAggregationBuilder<TopHit
} else if (SearchSourceBuilder.TRACK_SCORES_FIELD.match(currentFieldName)) { } else if (SearchSourceBuilder.TRACK_SCORES_FIELD.match(currentFieldName)) {
factory.trackScores(parser.booleanValue()); factory.trackScores(parser.booleanValue());
} else if (SearchSourceBuilder._SOURCE_FIELD.match(currentFieldName)) { } else if (SearchSourceBuilder._SOURCE_FIELD.match(currentFieldName)) {
factory.fetchSource(FetchSourceContext.fromXContent(context.parser())); factory.fetchSource(FetchSourceContext.fromXContent(parser));
} else if (SearchSourceBuilder.STORED_FIELDS_FIELD.match(currentFieldName)) { } else if (SearchSourceBuilder.STORED_FIELDS_FIELD.match(currentFieldName)) {
factory.storedFieldsContext = factory.storedFieldsContext =
StoredFieldsContext.fromXContent(SearchSourceBuilder.STORED_FIELDS_FIELD.getPreferredName(), context); StoredFieldsContext.fromXContent(SearchSourceBuilder.STORED_FIELDS_FIELD.getPreferredName(), parser);
} else if (SearchSourceBuilder.SORT_FIELD.match(currentFieldName)) { } else if (SearchSourceBuilder.SORT_FIELD.match(currentFieldName)) {
factory.sort(parser.text()); factory.sort(parser.text());
} else { } else {
@ -627,7 +625,7 @@ public class TopHitsAggregationBuilder extends AbstractAggregationBuilder<TopHit
} }
} else if (token == XContentParser.Token.START_OBJECT) { } else if (token == XContentParser.Token.START_OBJECT) {
if (SearchSourceBuilder._SOURCE_FIELD.match(currentFieldName)) { if (SearchSourceBuilder._SOURCE_FIELD.match(currentFieldName)) {
factory.fetchSource(FetchSourceContext.fromXContent(context.parser())); factory.fetchSource(FetchSourceContext.fromXContent(parser));
} else if (SearchSourceBuilder.SCRIPT_FIELDS_FIELD.match(currentFieldName)) { } else if (SearchSourceBuilder.SCRIPT_FIELDS_FIELD.match(currentFieldName)) {
List<ScriptField> scriptFields = new ArrayList<>(); List<ScriptField> scriptFields = new ArrayList<>();
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
@ -682,7 +680,7 @@ public class TopHitsAggregationBuilder extends AbstractAggregationBuilder<TopHit
if (SearchSourceBuilder.STORED_FIELDS_FIELD.match(currentFieldName)) { if (SearchSourceBuilder.STORED_FIELDS_FIELD.match(currentFieldName)) {
factory.storedFieldsContext = factory.storedFieldsContext =
StoredFieldsContext.fromXContent(SearchSourceBuilder.STORED_FIELDS_FIELD.getPreferredName(), context); StoredFieldsContext.fromXContent(SearchSourceBuilder.STORED_FIELDS_FIELD.getPreferredName(), parser);
} else if (SearchSourceBuilder.DOCVALUE_FIELDS_FIELD.match(currentFieldName)) { } else if (SearchSourceBuilder.DOCVALUE_FIELDS_FIELD.match(currentFieldName)) {
List<String> fieldDataFields = new ArrayList<>(); List<String> fieldDataFields = new ArrayList<>();
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
@ -698,7 +696,7 @@ public class TopHitsAggregationBuilder extends AbstractAggregationBuilder<TopHit
List<SortBuilder<?>> sorts = SortBuilder.fromXContent(parser); List<SortBuilder<?>> sorts = SortBuilder.fromXContent(parser);
factory.sorts(sorts); factory.sorts(sorts);
} else if (SearchSourceBuilder._SOURCE_FIELD.match(currentFieldName)) { } else if (SearchSourceBuilder._SOURCE_FIELD.match(currentFieldName)) {
factory.fetchSource(FetchSourceContext.fromXContent(context.parser())); factory.fetchSource(FetchSourceContext.fromXContent(parser));
} else { } else {
throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token + " in [" + currentFieldName + "].", throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token + " in [" + currentFieldName + "].",
parser.getTokenLocation()); parser.getTokenLocation());

View File

@ -23,7 +23,7 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.AggregatorFactory;
@ -40,14 +40,14 @@ import java.io.IOException;
public class ValueCountAggregationBuilder extends ValuesSourceAggregationBuilder.LeafOnly<ValuesSource, ValueCountAggregationBuilder> { public class ValueCountAggregationBuilder extends ValuesSourceAggregationBuilder.LeafOnly<ValuesSource, ValueCountAggregationBuilder> {
public static final String NAME = "value_count"; public static final String NAME = "value_count";
private static final ObjectParser<ValueCountAggregationBuilder, QueryParseContext> PARSER; private static final ObjectParser<ValueCountAggregationBuilder, Void> PARSER;
static { static {
PARSER = new ObjectParser<>(ValueCountAggregationBuilder.NAME); PARSER = new ObjectParser<>(ValueCountAggregationBuilder.NAME);
ValuesSourceParserHelper.declareAnyFields(PARSER, true, true); ValuesSourceParserHelper.declareAnyFields(PARSER, true, true);
} }
public static AggregationBuilder parse(String aggregationName, QueryParseContext context) throws IOException { public static AggregationBuilder parse(String aggregationName, XContentParser parser) throws IOException {
return PARSER.parse(context.parser(), new ValueCountAggregationBuilder(aggregationName, null), context); return PARSER.parse(parser, new ValueCountAggregationBuilder(aggregationName, null), null);
} }
public ValueCountAggregationBuilder(String name, ValueType targetValueType) { public ValueCountAggregationBuilder(String name, ValueType targetValueType) {

View File

@ -24,7 +24,6 @@ import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentLocation; import org.elasticsearch.common.xcontent.XContentLocation;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.search.aggregations.AggregationExecutionException; import org.elasticsearch.search.aggregations.AggregationExecutionException;
import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation;
import org.elasticsearch.search.aggregations.InvalidAggregationPathException; import org.elasticsearch.search.aggregations.InvalidAggregationPathException;
@ -58,11 +57,11 @@ public class BucketHelpers {
/** /**
* Parse a string GapPolicy into the byte enum * Parse a string GapPolicy into the byte enum
* *
* @param context SearchContext this is taking place in * @param text
* @param text GapPolicy in string format (e.g. "ignore") * GapPolicy in string format (e.g. "ignore")
* @return GapPolicy enum * @return GapPolicy enum
*/ */
public static GapPolicy parse(QueryParseContext context, String text, XContentLocation tokenLocation) { public static GapPolicy parse(String text, XContentLocation tokenLocation) {
GapPolicy result = null; GapPolicy result = null;
for (GapPolicy policy : values()) { for (GapPolicy policy : values()) {
if (policy.parseField.match(text)) { if (policy.parseField.match(text)) {
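Aside (illustrative only, not part of this change): GapPolicy.parse now needs just the raw string and the token location for error reporting. A minimal sketch, assuming the createParser(NamedXContentRegistry, String) overload available in this codebase at the time:

    import org.elasticsearch.common.xcontent.NamedXContentRegistry;
    import org.elasticsearch.common.xcontent.XContentParser;
    import org.elasticsearch.common.xcontent.json.JsonXContent;
    import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;

    public class GapPolicyParseSketch {
        public static void main(String[] args) throws Exception {
            String json = "{\"gap_policy\":\"skip\"}";
            try (XContentParser parser = JsonXContent.jsonXContent
                    .createParser(NamedXContentRegistry.EMPTY, json)) {
                parser.nextToken(); // START_OBJECT
                parser.nextToken(); // FIELD_NAME "gap_policy"
                parser.nextToken(); // VALUE_STRING "skip"
                // The token location is only used to build a ParsingException on bad input.
                GapPolicy policy = GapPolicy.parse(parser.text(), parser.getTokenLocation());
                System.out.println(policy);
            }
        }
    }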

View File

@ -24,7 +24,7 @@ import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.io.stream.NamedWriteable;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.InternalAggregation.ReduceContext; import org.elasticsearch.search.aggregations.InternalAggregation.ReduceContext;
import org.elasticsearch.search.aggregations.PipelineAggregationBuilder; import org.elasticsearch.search.aggregations.PipelineAggregationBuilder;
@ -34,7 +34,7 @@ import java.util.Map;
public abstract class PipelineAggregator implements NamedWriteable { public abstract class PipelineAggregator implements NamedWriteable {
/** /**
* Parse the {@link PipelineAggregationBuilder} from a {@link QueryParseContext}. * Parse the {@link PipelineAggregationBuilder} from an {@link XContentParser}.
*/ */
@FunctionalInterface @FunctionalInterface
public interface Parser { public interface Parser {
@ -48,13 +48,12 @@ public abstract class PipelineAggregator implements NamedWriteable {
* *
* @param pipelineAggregatorName * @param pipelineAggregatorName
* The name of the pipeline aggregation * The name of the pipeline aggregation
* @param context * @param parser the parser to read the pipeline aggregation from
* The search context
* @return The resolved pipeline aggregator factory * @return The resolved pipeline aggregator factory
* @throws java.io.IOException * @throws java.io.IOException
* When parsing fails * When parsing fails
*/ */
PipelineAggregationBuilder parse(String pipelineAggregatorName, QueryParseContext context) PipelineAggregationBuilder parse(String pipelineAggregatorName, XContentParser parser)
throws IOException; throws IOException;
} }
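Aside (illustrative only, not part of this change): since Parser is a functional interface over (String, XContentParser), the reworked static parse methods bind to it directly as method references. A sketch, assuming the cumulativesum package location used in this codebase:

    import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
    import org.elasticsearch.search.aggregations.pipeline.cumulativesum.CumulativeSumPipelineAggregationBuilder;

    public class PipelineParserBindingSketch {
        // The concrete builder returned by parse(...) still satisfies the
        // PipelineAggregationBuilder return type, so no adapter lambda is needed.
        static final PipelineAggregator.Parser CUMULATIVE_SUM =
                CumulativeSumPipelineAggregationBuilder::parse;
    }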

View File

@ -22,7 +22,6 @@ package org.elasticsearch.search.aggregations.pipeline.bucketmetrics;
import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
@ -44,9 +43,8 @@ public abstract class BucketMetricsParser implements PipelineAggregator.Parser {
} }
@Override @Override
public final BucketMetricsPipelineAggregationBuilder<?> parse(String pipelineAggregatorName, QueryParseContext context) public final BucketMetricsPipelineAggregationBuilder<?> parse(String pipelineAggregatorName, XContentParser parser)
throws IOException { throws IOException {
XContentParser parser = context.parser();
XContentParser.Token token; XContentParser.Token token;
String currentFieldName = null; String currentFieldName = null;
String[] bucketsPaths = null; String[] bucketsPaths = null;
@ -63,9 +61,9 @@ public abstract class BucketMetricsParser implements PipelineAggregator.Parser {
} else if (BUCKETS_PATH.match(currentFieldName)) { } else if (BUCKETS_PATH.match(currentFieldName)) {
bucketsPaths = new String[] { parser.text() }; bucketsPaths = new String[] { parser.text() };
} else if (GAP_POLICY.match(currentFieldName)) { } else if (GAP_POLICY.match(currentFieldName)) {
gapPolicy = GapPolicy.parse(context, parser.text(), parser.getTokenLocation()); gapPolicy = GapPolicy.parse(parser.text(), parser.getTokenLocation());
} else { } else {
parseToken(pipelineAggregatorName, parser, context, currentFieldName, token, params); parseToken(pipelineAggregatorName, parser, currentFieldName, token, params);
} }
} else if (token == XContentParser.Token.START_ARRAY) { } else if (token == XContentParser.Token.START_ARRAY) {
if (BUCKETS_PATH.match(currentFieldName)) { if (BUCKETS_PATH.match(currentFieldName)) {
@ -76,10 +74,10 @@ public abstract class BucketMetricsParser implements PipelineAggregator.Parser {
} }
bucketsPaths = paths.toArray(new String[paths.size()]); bucketsPaths = paths.toArray(new String[paths.size()]);
} else { } else {
parseToken(pipelineAggregatorName, parser, context, currentFieldName, token, params); parseToken(pipelineAggregatorName, parser, currentFieldName, token, params);
} }
} else { } else {
parseToken(pipelineAggregatorName, parser, context, currentFieldName, token, params); parseToken(pipelineAggregatorName, parser, currentFieldName, token, params);
} }
} }
@ -104,14 +102,14 @@ public abstract class BucketMetricsParser implements PipelineAggregator.Parser {
protected abstract BucketMetricsPipelineAggregationBuilder<?> buildFactory(String pipelineAggregatorName, String bucketsPaths, protected abstract BucketMetricsPipelineAggregationBuilder<?> buildFactory(String pipelineAggregatorName, String bucketsPaths,
Map<String, Object> params); Map<String, Object> params);
protected boolean token(XContentParser parser, QueryParseContext context, String field, protected boolean token(XContentParser parser, String field,
XContentParser.Token token, Map<String, Object> params) throws IOException { XContentParser.Token token, Map<String, Object> params) throws IOException {
return false; return false;
} }
private void parseToken(String aggregationName, XContentParser parser, QueryParseContext context, String currentFieldName, private void parseToken(String aggregationName, XContentParser parser, String currentFieldName,
XContentParser.Token currentToken, Map<String, Object> params) throws IOException { XContentParser.Token currentToken, Map<String, Object> params) throws IOException {
if (token(parser, context, currentFieldName, currentToken, params) == false) { if (token(parser, currentFieldName, currentToken, params) == false) {
throw new ParsingException(parser.getTokenLocation(), throw new ParsingException(parser.getTokenLocation(),
"Unexpected token " + currentToken + " [" + currentFieldName + "] in [" + aggregationName + "]"); "Unexpected token " + currentToken + " [" + currentFieldName + "] in [" + aggregationName + "]");
} }

View File

@ -26,7 +26,6 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.PipelineAggregationBuilder; import org.elasticsearch.search.aggregations.PipelineAggregationBuilder;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
@ -136,8 +135,8 @@ public class PercentilesBucketPipelineAggregationBuilder
} }
@Override @Override
protected boolean token(XContentParser parser, QueryParseContext context, String field, protected boolean token(XContentParser parser, String field, XContentParser.Token token, Map<String, Object> params)
XContentParser.Token token, Map<String, Object> params) throws IOException { throws IOException {
if (PERCENTS_FIELD.match(field) && token == XContentParser.Token.START_ARRAY) { if (PERCENTS_FIELD.match(field) && token == XContentParser.Token.START_ARRAY) {
DoubleArrayList percents = new DoubleArrayList(10); DoubleArrayList percents = new DoubleArrayList(10);
while (parser.nextToken() != XContentParser.Token.END_ARRAY) { while (parser.nextToken() != XContentParser.Token.END_ARRAY) {

View File

@ -21,7 +21,6 @@ package org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.exten
import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.BucketMetricsParser; import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.BucketMetricsParser;
import java.io.IOException; import java.io.IOException;
@ -44,7 +43,7 @@ public class ExtendedStatsBucketParser extends BucketMetricsParser {
} }
@Override @Override
protected boolean token(XContentParser parser, QueryParseContext context, String field, protected boolean token(XContentParser parser, String field,
XContentParser.Token token, Map<String, Object> params) throws IOException { XContentParser.Token token, Map<String, Object> params) throws IOException {
if (SIGMA.match(field) && token == XContentParser.Token.VALUE_NUMBER) { if (SIGMA.match(field) && token == XContentParser.Token.VALUE_NUMBER) {
params.put(SIGMA.getPreferredName(), parser.doubleValue()); params.put(SIGMA.getPreferredName(), parser.doubleValue());

View File

@ -24,7 +24,6 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.script.Script; import org.elasticsearch.script.Script;
import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.pipeline.AbstractPipelineAggregationBuilder; import org.elasticsearch.search.aggregations.pipeline.AbstractPipelineAggregationBuilder;
@ -68,7 +67,7 @@ public class BucketScriptPipelineAggregationBuilder extends AbstractPipelineAggr
public BucketScriptPipelineAggregationBuilder(StreamInput in) throws IOException { public BucketScriptPipelineAggregationBuilder(StreamInput in) throws IOException {
super(in, NAME); super(in, NAME);
int mapSize = in.readVInt(); int mapSize = in.readVInt();
bucketsPathsMap = new HashMap<String, String>(mapSize); bucketsPathsMap = new HashMap<>(mapSize);
for (int i = 0; i < mapSize; i++) { for (int i = 0; i < mapSize; i++) {
bucketsPathsMap.put(in.readString(), in.readString()); bucketsPathsMap.put(in.readString(), in.readString());
} }
@ -157,8 +156,7 @@ public class BucketScriptPipelineAggregationBuilder extends AbstractPipelineAggr
return builder; return builder;
} }
public static BucketScriptPipelineAggregationBuilder parse(String reducerName, QueryParseContext context) throws IOException { public static BucketScriptPipelineAggregationBuilder parse(String reducerName, XContentParser parser) throws IOException {
XContentParser parser = context.parser();
XContentParser.Token token; XContentParser.Token token;
Script script = null; Script script = null;
String currentFieldName = null; String currentFieldName = null;
@ -176,7 +174,7 @@ public class BucketScriptPipelineAggregationBuilder extends AbstractPipelineAggr
bucketsPathsMap = new HashMap<>(); bucketsPathsMap = new HashMap<>();
bucketsPathsMap.put("_value", parser.text()); bucketsPathsMap.put("_value", parser.text());
} else if (GAP_POLICY.match(currentFieldName)) { } else if (GAP_POLICY.match(currentFieldName)) {
gapPolicy = GapPolicy.parse(context, parser.text(), parser.getTokenLocation()); gapPolicy = GapPolicy.parse(parser.text(), parser.getTokenLocation());
} else if (Script.SCRIPT_PARSE_FIELD.match(currentFieldName)) { } else if (Script.SCRIPT_PARSE_FIELD.match(currentFieldName)) {
script = Script.parse(parser); script = Script.parse(parser);
} else { } else {

View File

@ -24,7 +24,6 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.script.Script; import org.elasticsearch.script.Script;
import org.elasticsearch.search.aggregations.pipeline.AbstractPipelineAggregationBuilder; import org.elasticsearch.search.aggregations.pipeline.AbstractPipelineAggregationBuilder;
import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
@ -65,7 +64,7 @@ public class BucketSelectorPipelineAggregationBuilder extends AbstractPipelineAg
public BucketSelectorPipelineAggregationBuilder(StreamInput in) throws IOException { public BucketSelectorPipelineAggregationBuilder(StreamInput in) throws IOException {
super(in, NAME); super(in, NAME);
int mapSize = in.readVInt(); int mapSize = in.readVInt();
bucketsPathsMap = new HashMap<String, String>(mapSize); bucketsPathsMap = new HashMap<>(mapSize);
for (int i = 0; i < mapSize; i++) { for (int i = 0; i < mapSize; i++) {
bucketsPathsMap.put(in.readString(), in.readString()); bucketsPathsMap.put(in.readString(), in.readString());
} }
@ -123,8 +122,7 @@ public class BucketSelectorPipelineAggregationBuilder extends AbstractPipelineAg
return builder; return builder;
} }
public static BucketSelectorPipelineAggregationBuilder parse(String reducerName, QueryParseContext context) throws IOException { public static BucketSelectorPipelineAggregationBuilder parse(String reducerName, XContentParser parser) throws IOException {
XContentParser parser = context.parser();
XContentParser.Token token; XContentParser.Token token;
Script script = null; Script script = null;
String currentFieldName = null; String currentFieldName = null;
@ -139,7 +137,7 @@ public class BucketSelectorPipelineAggregationBuilder extends AbstractPipelineAg
bucketsPathsMap = new HashMap<>(); bucketsPathsMap = new HashMap<>();
bucketsPathsMap.put("_value", parser.text()); bucketsPathsMap.put("_value", parser.text());
} else if (GAP_POLICY.match(currentFieldName)) { } else if (GAP_POLICY.match(currentFieldName)) {
gapPolicy = GapPolicy.parse(context, parser.text(), parser.getTokenLocation()); gapPolicy = GapPolicy.parse(parser.text(), parser.getTokenLocation());
} else if (Script.SCRIPT_PARSE_FIELD.match(currentFieldName)) { } else if (Script.SCRIPT_PARSE_FIELD.match(currentFieldName)) {
script = Script.parse(parser); script = Script.parse(parser);
} else { } else {

View File

@ -24,7 +24,6 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.PipelineAggregationBuilder; import org.elasticsearch.search.aggregations.PipelineAggregationBuilder;
@ -129,9 +128,8 @@ public class CumulativeSumPipelineAggregationBuilder extends AbstractPipelineAgg
return builder; return builder;
} }
public static CumulativeSumPipelineAggregationBuilder parse(String pipelineAggregatorName, QueryParseContext context) public static CumulativeSumPipelineAggregationBuilder parse(String pipelineAggregatorName, XContentParser parser)
throws IOException { throws IOException {
XContentParser parser = context.parser();
XContentParser.Token token; XContentParser.Token token;
String currentFieldName = null; String currentFieldName = null;
String[] bucketsPaths = null; String[] bucketsPaths = null;

View File

@ -27,12 +27,11 @@ import org.elasticsearch.common.rounding.DateTimeUnit;
import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.PipelineAggregationBuilder; import org.elasticsearch.search.aggregations.PipelineAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregatorFactory;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregatorFactory;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregatorFactory; import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregatorFactory;
import org.elasticsearch.search.aggregations.pipeline.AbstractPipelineAggregationBuilder; import org.elasticsearch.search.aggregations.pipeline.AbstractPipelineAggregationBuilder;
@ -194,8 +193,7 @@ public class DerivativePipelineAggregationBuilder extends AbstractPipelineAggreg
return builder; return builder;
} }
public static DerivativePipelineAggregationBuilder parse(String pipelineAggregatorName, QueryParseContext context) throws IOException { public static DerivativePipelineAggregationBuilder parse(String pipelineAggregatorName, XContentParser parser) throws IOException {
XContentParser parser = context.parser();
XContentParser.Token token; XContentParser.Token token;
String currentFieldName = null; String currentFieldName = null;
String[] bucketsPaths = null; String[] bucketsPaths = null;
@ -212,7 +210,7 @@ public class DerivativePipelineAggregationBuilder extends AbstractPipelineAggreg
} else if (BUCKETS_PATH_FIELD.match(currentFieldName)) { } else if (BUCKETS_PATH_FIELD.match(currentFieldName)) {
bucketsPaths = new String[] { parser.text() }; bucketsPaths = new String[] { parser.text() };
} else if (GAP_POLICY_FIELD.match(currentFieldName)) { } else if (GAP_POLICY_FIELD.match(currentFieldName)) {
gapPolicy = GapPolicy.parse(context, parser.text(), parser.getTokenLocation()); gapPolicy = GapPolicy.parse(parser.text(), parser.getTokenLocation());
} else if (UNIT_FIELD.match(currentFieldName)) { } else if (UNIT_FIELD.match(currentFieldName)) {
units = parser.text(); units = parser.text();
} else { } else {

View File

@ -26,7 +26,6 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ParseFieldRegistry; import org.elasticsearch.common.xcontent.ParseFieldRegistry;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.PipelineAggregationBuilder; import org.elasticsearch.search.aggregations.PipelineAggregationBuilder;
@ -304,8 +303,7 @@ public class MovAvgPipelineAggregationBuilder extends AbstractPipelineAggregatio
public static MovAvgPipelineAggregationBuilder parse( public static MovAvgPipelineAggregationBuilder parse(
ParseFieldRegistry<MovAvgModel.AbstractModelParser> movingAverageMdelParserRegistry, ParseFieldRegistry<MovAvgModel.AbstractModelParser> movingAverageMdelParserRegistry,
String pipelineAggregatorName, QueryParseContext context) throws IOException { String pipelineAggregatorName, XContentParser parser) throws IOException {
XContentParser parser = context.parser();
XContentParser.Token token; XContentParser.Token token;
String currentFieldName = null; String currentFieldName = null;
String[] bucketsPaths = null; String[] bucketsPaths = null;
@ -344,7 +342,7 @@ public class MovAvgPipelineAggregationBuilder extends AbstractPipelineAggregatio
} else if (BUCKETS_PATH.match(currentFieldName)) { } else if (BUCKETS_PATH.match(currentFieldName)) {
bucketsPaths = new String[] { parser.text() }; bucketsPaths = new String[] { parser.text() };
} else if (GAP_POLICY.match(currentFieldName)) { } else if (GAP_POLICY.match(currentFieldName)) {
gapPolicy = GapPolicy.parse(context, parser.text(), parser.getTokenLocation()); gapPolicy = GapPolicy.parse(parser.text(), parser.getTokenLocation());
} else if (MODEL.match(currentFieldName)) { } else if (MODEL.match(currentFieldName)) {
model = parser.text(); model = parser.text();
} else { } else {

View File

@ -25,7 +25,6 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.pipeline.AbstractPipelineAggregationBuilder; import org.elasticsearch.search.aggregations.pipeline.AbstractPipelineAggregationBuilder;
import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
@ -149,8 +148,7 @@ public class SerialDiffPipelineAggregationBuilder extends AbstractPipelineAggreg
return builder; return builder;
} }
public static SerialDiffPipelineAggregationBuilder parse(String reducerName, QueryParseContext context) throws IOException { public static SerialDiffPipelineAggregationBuilder parse(String reducerName, XContentParser parser) throws IOException {
XContentParser parser = context.parser();
XContentParser.Token token; XContentParser.Token token;
String currentFieldName = null; String currentFieldName = null;
String[] bucketsPaths = null; String[] bucketsPaths = null;
@ -167,7 +165,7 @@ public class SerialDiffPipelineAggregationBuilder extends AbstractPipelineAggreg
} else if (BUCKETS_PATH.match(currentFieldName)) { } else if (BUCKETS_PATH.match(currentFieldName)) {
bucketsPaths = new String[] { parser.text() }; bucketsPaths = new String[] { parser.text() };
} else if (GAP_POLICY.match(currentFieldName)) { } else if (GAP_POLICY.match(currentFieldName)) {
gapPolicy = GapPolicy.parse(context, parser.text(), parser.getTokenLocation()); gapPolicy = GapPolicy.parse(parser.text(), parser.getTokenLocation());
} else { } else {
throw new ParsingException(parser.getTokenLocation(), throw new ParsingException(parser.getTokenLocation(),
"Unknown key for a " + token + " in [" + reducerName + "]: [" + currentFieldName + "]."); "Unknown key for a " + token + " in [" + reducerName + "]: [" + currentFieldName + "].");

View File

@ -23,7 +23,6 @@ import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.script.Script; import org.elasticsearch.script.Script;
import org.joda.time.DateTimeZone; import org.joda.time.DateTimeZone;
@ -33,31 +32,31 @@ public final class ValuesSourceParserHelper {
private ValuesSourceParserHelper() {} // utility class, no instantiation private ValuesSourceParserHelper() {} // utility class, no instantiation
public static void declareAnyFields( public static void declareAnyFields(
ObjectParser<? extends ValuesSourceAggregationBuilder<ValuesSource, ?>, QueryParseContext> objectParser, ObjectParser<? extends ValuesSourceAggregationBuilder<ValuesSource, ?>, Void> objectParser,
boolean scriptable, boolean formattable) { boolean scriptable, boolean formattable) {
declareFields(objectParser, scriptable, formattable, false, null); declareFields(objectParser, scriptable, formattable, false, null);
} }
public static void declareNumericFields( public static void declareNumericFields(
ObjectParser<? extends ValuesSourceAggregationBuilder<ValuesSource.Numeric, ?>, QueryParseContext> objectParser, ObjectParser<? extends ValuesSourceAggregationBuilder<ValuesSource.Numeric, ?>, Void> objectParser,
boolean scriptable, boolean formattable, boolean timezoneAware) { boolean scriptable, boolean formattable, boolean timezoneAware) {
declareFields(objectParser, scriptable, formattable, timezoneAware, ValueType.NUMERIC); declareFields(objectParser, scriptable, formattable, timezoneAware, ValueType.NUMERIC);
} }
public static void declareBytesFields( public static void declareBytesFields(
ObjectParser<? extends ValuesSourceAggregationBuilder<ValuesSource.Bytes, ?>, QueryParseContext> objectParser, ObjectParser<? extends ValuesSourceAggregationBuilder<ValuesSource.Bytes, ?>, Void> objectParser,
boolean scriptable, boolean formattable) { boolean scriptable, boolean formattable) {
declareFields(objectParser, scriptable, formattable, false, ValueType.STRING); declareFields(objectParser, scriptable, formattable, false, ValueType.STRING);
} }
public static void declareGeoFields( public static void declareGeoFields(
ObjectParser<? extends ValuesSourceAggregationBuilder<ValuesSource.GeoPoint, ?>, QueryParseContext> objectParser, ObjectParser<? extends ValuesSourceAggregationBuilder<ValuesSource.GeoPoint, ?>, Void> objectParser,
boolean scriptable, boolean formattable) { boolean scriptable, boolean formattable) {
declareFields(objectParser, scriptable, formattable, false, ValueType.GEOPOINT); declareFields(objectParser, scriptable, formattable, false, ValueType.GEOPOINT);
} }
private static <VS extends ValuesSource> void declareFields( private static <VS extends ValuesSource> void declareFields(
ObjectParser<? extends ValuesSourceAggregationBuilder<VS, ?>, QueryParseContext> objectParser, ObjectParser<? extends ValuesSourceAggregationBuilder<VS, ?>, Void> objectParser,
boolean scriptable, boolean formattable, boolean timezoneAware, ValueType targetValueType) { boolean scriptable, boolean formattable, boolean timezoneAware, ValueType targetValueType) {

View File

@ -36,7 +36,6 @@ import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.script.Script; import org.elasticsearch.script.Script;
import org.elasticsearch.search.SearchExtBuilder; import org.elasticsearch.search.SearchExtBuilder;
@ -109,9 +108,9 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
public static final ParseField SLICE = new ParseField("slice"); public static final ParseField SLICE = new ParseField("slice");
public static final ParseField ALL_FIELDS_FIELDS = new ParseField("all_fields"); public static final ParseField ALL_FIELDS_FIELDS = new ParseField("all_fields");
public static SearchSourceBuilder fromXContent(QueryParseContext context) throws IOException { public static SearchSourceBuilder fromXContent(XContentParser parser) throws IOException {
SearchSourceBuilder builder = new SearchSourceBuilder(); SearchSourceBuilder builder = new SearchSourceBuilder();
builder.parseXContent(context); builder.parseXContent(parser);
return builder; return builder;
} }
@ -959,10 +958,9 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
/** /**
* Parse some xContent into this SearchSourceBuilder, overwriting any values specified in the xContent. Use this if you need to set up * Parse some xContent into this SearchSourceBuilder, overwriting any values specified in the xContent. Use this if you need to set up
* different defaults than a regular SearchSourceBuilder would have and use * different defaults than a regular SearchSourceBuilder would have and use
* {@link #fromXContent(QueryParseContext)} if you have normal defaults. * {@link #fromXContent(XContentParser)} if you have normal defaults.
*/ */
public void parseXContent(QueryParseContext context) throws IOException { public void parseXContent(XContentParser parser) throws IOException {
XContentParser parser = context.parser();
XContentParser.Token token = parser.currentToken(); XContentParser.Token token = parser.currentToken();
String currentFieldName = null; String currentFieldName = null;
if (token != XContentParser.Token.START_OBJECT && (token = parser.nextToken()) != XContentParser.Token.START_OBJECT) { if (token != XContentParser.Token.START_OBJECT && (token = parser.nextToken()) != XContentParser.Token.START_OBJECT) {
@ -992,10 +990,10 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
} else if (TRACK_TOTAL_HITS_FIELD.match(currentFieldName)) { } else if (TRACK_TOTAL_HITS_FIELD.match(currentFieldName)) {
trackTotalHits = parser.booleanValue(); trackTotalHits = parser.booleanValue();
} else if (_SOURCE_FIELD.match(currentFieldName)) { } else if (_SOURCE_FIELD.match(currentFieldName)) {
fetchSourceContext = FetchSourceContext.fromXContent(context.parser()); fetchSourceContext = FetchSourceContext.fromXContent(parser);
} else if (STORED_FIELDS_FIELD.match(currentFieldName)) { } else if (STORED_FIELDS_FIELD.match(currentFieldName)) {
storedFieldsContext = storedFieldsContext =
StoredFieldsContext.fromXContent(SearchSourceBuilder.STORED_FIELDS_FIELD.getPreferredName(), context); StoredFieldsContext.fromXContent(SearchSourceBuilder.STORED_FIELDS_FIELD.getPreferredName(), parser);
} else if (SORT_FIELD.match(currentFieldName)) { } else if (SORT_FIELD.match(currentFieldName)) {
sort(parser.text()); sort(parser.text());
} else if (PROFILE_FIELD.match(currentFieldName)) { } else if (PROFILE_FIELD.match(currentFieldName)) {
@ -1014,7 +1012,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
} else if (POST_FILTER_FIELD.match(currentFieldName)) { } else if (POST_FILTER_FIELD.match(currentFieldName)) {
postQueryBuilder = parseInnerQueryBuilder(parser); postQueryBuilder = parseInnerQueryBuilder(parser);
} else if (_SOURCE_FIELD.match(currentFieldName)) { } else if (_SOURCE_FIELD.match(currentFieldName)) {
fetchSourceContext = FetchSourceContext.fromXContent(context.parser()); fetchSourceContext = FetchSourceContext.fromXContent(parser);
} else if (SCRIPT_FIELDS_FIELD.match(currentFieldName)) { } else if (SCRIPT_FIELDS_FIELD.match(currentFieldName)) {
scriptFields = new ArrayList<>(); scriptFields = new ArrayList<>();
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
@ -1035,16 +1033,16 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
} }
} else if (AGGREGATIONS_FIELD.match(currentFieldName) } else if (AGGREGATIONS_FIELD.match(currentFieldName)
|| AGGS_FIELD.match(currentFieldName)) { || AGGS_FIELD.match(currentFieldName)) {
aggregations = AggregatorFactories.parseAggregators(context); aggregations = AggregatorFactories.parseAggregators(parser);
} else if (HIGHLIGHT_FIELD.match(currentFieldName)) { } else if (HIGHLIGHT_FIELD.match(currentFieldName)) {
highlightBuilder = HighlightBuilder.fromXContent(parser); highlightBuilder = HighlightBuilder.fromXContent(parser);
} else if (SUGGEST_FIELD.match(currentFieldName)) { } else if (SUGGEST_FIELD.match(currentFieldName)) {
suggestBuilder = SuggestBuilder.fromXContent(context.parser()); suggestBuilder = SuggestBuilder.fromXContent(parser);
} else if (SORT_FIELD.match(currentFieldName)) { } else if (SORT_FIELD.match(currentFieldName)) {
sorts = new ArrayList<>(SortBuilder.fromXContent(parser)); sorts = new ArrayList<>(SortBuilder.fromXContent(parser));
} else if (RESCORE_FIELD.match(currentFieldName)) { } else if (RESCORE_FIELD.match(currentFieldName)) {
rescoreBuilders = new ArrayList<>(); rescoreBuilders = new ArrayList<>();
rescoreBuilders.add(RescoreBuilder.parseFromXContent(context)); rescoreBuilders.add(RescoreBuilder.parseFromXContent(parser));
} else if (EXT_FIELD.match(currentFieldName)) { } else if (EXT_FIELD.match(currentFieldName)) {
extBuilders = new ArrayList<>(); extBuilders = new ArrayList<>();
String extSectionName = null; String extSectionName = null;
@ -1062,16 +1060,16 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
} }
} }
} else if (SLICE.match(currentFieldName)) { } else if (SLICE.match(currentFieldName)) {
sliceBuilder = SliceBuilder.fromXContent(context); sliceBuilder = SliceBuilder.fromXContent(parser);
} else if (COLLAPSE.match(currentFieldName)) { } else if (COLLAPSE.match(currentFieldName)) {
collapse = CollapseBuilder.fromXContent(context); collapse = CollapseBuilder.fromXContent(parser);
} else { } else {
throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token + " in [" + currentFieldName + "].", throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token + " in [" + currentFieldName + "].",
parser.getTokenLocation()); parser.getTokenLocation());
} }
} else if (token == XContentParser.Token.START_ARRAY) { } else if (token == XContentParser.Token.START_ARRAY) {
if (STORED_FIELDS_FIELD.match(currentFieldName)) { if (STORED_FIELDS_FIELD.match(currentFieldName)) {
storedFieldsContext = StoredFieldsContext.fromXContent(STORED_FIELDS_FIELD.getPreferredName(), context); storedFieldsContext = StoredFieldsContext.fromXContent(STORED_FIELDS_FIELD.getPreferredName(), parser);
} else if (DOCVALUE_FIELDS_FIELD.match(currentFieldName)) { } else if (DOCVALUE_FIELDS_FIELD.match(currentFieldName)) {
docValueFields = new ArrayList<>(); docValueFields = new ArrayList<>();
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
@ -1084,14 +1082,14 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
} }
} else if (INDICES_BOOST_FIELD.match(currentFieldName)) { } else if (INDICES_BOOST_FIELD.match(currentFieldName)) {
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
indexBoosts.add(new IndexBoost(context)); indexBoosts.add(new IndexBoost(parser));
} }
} else if (SORT_FIELD.match(currentFieldName)) { } else if (SORT_FIELD.match(currentFieldName)) {
sorts = new ArrayList<>(SortBuilder.fromXContent(parser)); sorts = new ArrayList<>(SortBuilder.fromXContent(parser));
} else if (RESCORE_FIELD.match(currentFieldName)) { } else if (RESCORE_FIELD.match(currentFieldName)) {
rescoreBuilders = new ArrayList<>(); rescoreBuilders = new ArrayList<>();
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
rescoreBuilders.add(RescoreBuilder.parseFromXContent(context)); rescoreBuilders.add(RescoreBuilder.parseFromXContent(parser));
} }
} else if (STATS_FIELD.match(currentFieldName)) { } else if (STATS_FIELD.match(currentFieldName)) {
stats = new ArrayList<>(); stats = new ArrayList<>();
@ -1104,7 +1102,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
} }
} }
} else if (_SOURCE_FIELD.match(currentFieldName)) { } else if (_SOURCE_FIELD.match(currentFieldName)) {
fetchSourceContext = FetchSourceContext.fromXContent(context.parser()); fetchSourceContext = FetchSourceContext.fromXContent(parser);
} else if (SEARCH_AFTER.match(currentFieldName)) { } else if (SEARCH_AFTER.match(currentFieldName)) {
searchAfterBuilder = SearchAfterBuilder.fromXContent(parser); searchAfterBuilder = SearchAfterBuilder.fromXContent(parser);
} else if (FIELDS_FIELD.match(currentFieldName)) { } else if (FIELDS_FIELD.match(currentFieldName)) {
@ -1276,8 +1274,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
boost = in.readFloat(); boost = in.readFloat();
} }
IndexBoost(QueryParseContext context) throws IOException { IndexBoost(XContentParser parser) throws IOException {
XContentParser parser = context.parser();
XContentParser.Token token = parser.currentToken(); XContentParser.Token token = parser.currentToken();
if (token == XContentParser.Token.START_OBJECT) { if (token == XContentParser.Token.START_OBJECT) {
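Editor's note: the SearchSourceBuilder hunks above all reduce to one calling convention: each section parser now receives the XContentParser directly instead of a QueryParseContext that it would immediately unwrap. A minimal caller-side sketch, assuming only the fromXContent signatures shown in this diff; the enclosing helper class and the import paths are illustrative, not part of the commit.

    import java.io.IOException;
    import org.elasticsearch.common.xcontent.XContentParser;
    import org.elasticsearch.search.slice.SliceBuilder;
    import org.elasticsearch.search.suggest.SuggestBuilder;

    final class SectionParsingSketch {
        private SectionParsingSketch() {}

        // Before: SuggestBuilder.fromXContent(context.parser()) / SliceBuilder.fromXContent(context)
        // After:  the parser itself is handed straight through.
        static SuggestBuilder readSuggest(XContentParser parser) throws IOException {
            return SuggestBuilder.fromXContent(parser);
        }

        static SliceBuilder readSlice(XContentParser parser) throws IOException {
            return SliceBuilder.fromXContent(parser);
        }
    }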

View File

@ -35,7 +35,6 @@ import org.elasticsearch.index.mapper.KeywordFieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.mapper.NumberFieldMapper;
import org.elasticsearch.index.query.InnerHitBuilder; import org.elasticsearch.index.query.InnerHitBuilder;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.search.SearchContextException; import org.elasticsearch.search.SearchContextException;
import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.internal.SearchContext;
@ -52,7 +51,7 @@ public class CollapseBuilder implements Writeable, ToXContentObject {
public static final ParseField FIELD_FIELD = new ParseField("field"); public static final ParseField FIELD_FIELD = new ParseField("field");
public static final ParseField INNER_HITS_FIELD = new ParseField("inner_hits"); public static final ParseField INNER_HITS_FIELD = new ParseField("inner_hits");
public static final ParseField MAX_CONCURRENT_GROUP_REQUESTS_FIELD = new ParseField("max_concurrent_group_searches"); public static final ParseField MAX_CONCURRENT_GROUP_REQUESTS_FIELD = new ParseField("max_concurrent_group_searches");
private static final ObjectParser<CollapseBuilder, QueryParseContext> PARSER = private static final ObjectParser<CollapseBuilder, Void> PARSER =
new ObjectParser<>("collapse", CollapseBuilder::new); new ObjectParser<>("collapse", CollapseBuilder::new);
static { static {
@ -122,8 +121,8 @@ public class CollapseBuilder implements Writeable, ToXContentObject {
} }
} }
public static CollapseBuilder fromXContent(QueryParseContext context) throws IOException { public static CollapseBuilder fromXContent(XContentParser parser) throws IOException {
CollapseBuilder builder = PARSER.parse(context.parser(), new CollapseBuilder(), context); CollapseBuilder builder = PARSER.parse(parser, new CollapseBuilder(), null);
return builder; return builder;
} }
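Editor's note: with no context left to carry, the ObjectParser declarations switch their context type parameter from QueryParseContext to Void and callers pass null, exactly as CollapseBuilder.fromXContent does above. A compact sketch of that pattern against a hypothetical builder; only ObjectParser and ParseField are real Elasticsearch types here.

    import java.io.IOException;
    import org.elasticsearch.common.ParseField;
    import org.elasticsearch.common.xcontent.ObjectParser;
    import org.elasticsearch.common.xcontent.XContentParser;

    class ExampleBuilder {
        private static final ObjectParser<ExampleBuilder, Void> PARSER =
                new ObjectParser<>("example", ExampleBuilder::new);
        static {
            PARSER.declareString(ExampleBuilder::setField, new ParseField("field"));
        }

        private String field;

        void setField(String field) {
            this.field = field;
        }

        public static ExampleBuilder fromXContent(XContentParser parser) throws IOException {
            // The trailing null is the now-unused Void context.
            return PARSER.parse(parser, new ExampleBuilder(), null);
        }
    }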

View File

@ -26,7 +26,6 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestRequest;
import java.io.IOException; import java.io.IOException;
@ -164,8 +163,7 @@ public class StoredFieldsContext implements Writeable {
return new StoredFieldsContext(fieldNames); return new StoredFieldsContext(fieldNames);
} }
public static StoredFieldsContext fromXContent(String fieldName, QueryParseContext context) throws IOException { public static StoredFieldsContext fromXContent(String fieldName, XContentParser parser) throws IOException {
XContentParser parser = context.parser();
XContentParser.Token token = parser.currentToken(); XContentParser.Token token = parser.currentToken();
if (token == XContentParser.Token.VALUE_STRING) { if (token == XContentParser.Token.VALUE_STRING) {
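Editor's note: for hand-rolled parsers such as StoredFieldsContext the change is mechanical: drop the context.parser() unwrapping and read tokens from the argument. A sketch of that style under assumed field semantics; the helper below is illustrative and not the StoredFieldsContext implementation.

    import java.io.IOException;
    import java.util.ArrayList;
    import java.util.Collections;
    import java.util.List;
    import org.elasticsearch.common.ParsingException;
    import org.elasticsearch.common.xcontent.XContentParser;

    final class FieldNamesSketch {
        private FieldNamesSketch() {}

        static List<String> parse(String fieldName, XContentParser parser) throws IOException {
            XContentParser.Token token = parser.currentToken();
            if (token == XContentParser.Token.VALUE_STRING) {
                return Collections.singletonList(parser.text());
            } else if (token == XContentParser.Token.START_ARRAY) {
                List<String> names = new ArrayList<>();
                while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
                    names.add(parser.text());
                }
                return names;
            }
            throw new ParsingException(parser.getTokenLocation(),
                    "Expected [" + fieldName + "] to be a string or an array of strings");
        }
    }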

View File

@ -32,7 +32,6 @@ import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder.BoundaryScannerType; import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder.BoundaryScannerType;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder.Order; import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder.Order;
@ -595,7 +594,7 @@ public abstract class AbstractHighlighterBuilder<HB extends AbstractHighlighterB
} }
static <HB extends AbstractHighlighterBuilder<HB>> BiFunction<XContentParser, HB, HB> setupParser( static <HB extends AbstractHighlighterBuilder<HB>> BiFunction<XContentParser, HB, HB> setupParser(
ObjectParser<HB, QueryParseContext> parser) { ObjectParser<HB, Void> parser) {
parser.declareStringArray(fromList(String.class, HB::preTags), PRE_TAGS_FIELD); parser.declareStringArray(fromList(String.class, HB::preTags), PRE_TAGS_FIELD);
parser.declareStringArray(fromList(String.class, HB::postTags), POST_TAGS_FIELD); parser.declareStringArray(fromList(String.class, HB::postTags), POST_TAGS_FIELD);
parser.declareString(HB::order, ORDER_FIELD); parser.declareString(HB::order, ORDER_FIELD);
@ -612,14 +611,14 @@ public abstract class AbstractHighlighterBuilder<HB extends AbstractHighlighterB
parser.declareInt(HB::noMatchSize, NO_MATCH_SIZE_FIELD); parser.declareInt(HB::noMatchSize, NO_MATCH_SIZE_FIELD);
parser.declareBoolean(HB::forceSource, FORCE_SOURCE_FIELD); parser.declareBoolean(HB::forceSource, FORCE_SOURCE_FIELD);
parser.declareInt(HB::phraseLimit, PHRASE_LIMIT_FIELD); parser.declareInt(HB::phraseLimit, PHRASE_LIMIT_FIELD);
parser.declareObject(HB::options, (XContentParser p, QueryParseContext c) -> { parser.declareObject(HB::options, (XContentParser p, Void c) -> {
try { try {
return p.map(); return p.map();
} catch (IOException e) { } catch (IOException e) {
throw new RuntimeException("Error parsing options", e); throw new RuntimeException("Error parsing options", e);
} }
}, OPTIONS_FIELD); }, OPTIONS_FIELD);
parser.declareObject(HB::highlightQuery, (XContentParser p, QueryParseContext c) -> { parser.declareObject(HB::highlightQuery, (XContentParser p, Void c) -> {
try { try {
return parseInnerQueryBuilder(p); return parseInnerQueryBuilder(p);
} catch (IOException e) { } catch (IOException e) {
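Editor's note: the lambda-based declarations change the same way: the second parameter of each (parser, context) callback becomes Void and is simply ignored. A sketch with a hypothetical builder; parseInnerQueryBuilder(XContentParser) is the static helper this diff itself imports elsewhere.

    import java.io.IOException;
    import org.elasticsearch.common.ParseField;
    import org.elasticsearch.common.xcontent.ObjectParser;
    import org.elasticsearch.common.xcontent.XContentParser;
    import org.elasticsearch.index.query.QueryBuilder;

    import static org.elasticsearch.index.query.AbstractQueryBuilder.parseInnerQueryBuilder;

    class QueryHolderSketch {
        private static final ObjectParser<QueryHolderSketch, Void> PARSER =
                new ObjectParser<>("query_holder", QueryHolderSketch::new);
        static {
            // The Void parameter only satisfies the callback shape; it is never used.
            PARSER.declareObject(QueryHolderSketch::setQuery, (XContentParser p, Void c) -> {
                try {
                    return parseInnerQueryBuilder(p);
                } catch (IOException e) {
                    throw new RuntimeException("Error parsing query", e);
                }
            }, new ParseField("query"));
        }

        private QueryBuilder query;

        void setQuery(QueryBuilder query) {
            this.query = query;
        }

        static QueryHolderSketch fromXContent(XContentParser parser) throws IOException {
            return PARSER.parse(parser, new QueryHolderSketch(), null);
        }
    }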

View File

@ -30,7 +30,6 @@ import org.elasticsearch.common.xcontent.ObjectParser.NamedObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.fetch.subphase.highlight.SearchContextHighlight.FieldOptions; import org.elasticsearch.search.fetch.subphase.highlight.SearchContextHighlight.FieldOptions;
@ -258,7 +257,7 @@ public class HighlightBuilder extends AbstractHighlighterBuilder<HighlightBuilde
private static final BiFunction<XContentParser, HighlightBuilder, HighlightBuilder> PARSER; private static final BiFunction<XContentParser, HighlightBuilder, HighlightBuilder> PARSER;
static { static {
ObjectParser<HighlightBuilder, QueryParseContext> parser = new ObjectParser<>("highlight"); ObjectParser<HighlightBuilder, Void> parser = new ObjectParser<>("highlight");
parser.declareString(HighlightBuilder::tagsSchema, new ParseField("tags_schema")); parser.declareString(HighlightBuilder::tagsSchema, new ParseField("tags_schema"));
parser.declareString(HighlightBuilder::encoder, ENCODER_FIELD); parser.declareString(HighlightBuilder::encoder, ENCODER_FIELD);
parser.declareNamedObjects(HighlightBuilder::fields, Field.PARSER, (HighlightBuilder hb) -> hb.useExplicitFieldOrder(true), parser.declareNamedObjects(HighlightBuilder::fields, Field.PARSER, (HighlightBuilder hb) -> hb.useExplicitFieldOrder(true),
@ -417,13 +416,13 @@ public class HighlightBuilder extends AbstractHighlighterBuilder<HighlightBuilde
} }
public static class Field extends AbstractHighlighterBuilder<Field> { public static class Field extends AbstractHighlighterBuilder<Field> {
static final NamedObjectParser<Field, QueryParseContext> PARSER; static final NamedObjectParser<Field, Void> PARSER;
static { static {
ObjectParser<Field, QueryParseContext> parser = new ObjectParser<>("highlight_field"); ObjectParser<Field, Void> parser = new ObjectParser<>("highlight_field");
parser.declareInt(Field::fragmentOffset, FRAGMENT_OFFSET_FIELD); parser.declareInt(Field::fragmentOffset, FRAGMENT_OFFSET_FIELD);
parser.declareStringArray(fromList(String.class, Field::matchedFields), MATCHED_FIELDS_FIELD); parser.declareStringArray(fromList(String.class, Field::matchedFields), MATCHED_FIELDS_FIELD);
BiFunction<XContentParser, Field, Field> decoratedParser = setupParser(parser); BiFunction<XContentParser, Field, Field> decoratedParser = setupParser(parser);
PARSER = (XContentParser p, QueryParseContext c, String name) -> decoratedParser.apply(p, new Field(name)); PARSER = (XContentParser p, Void c, String name) -> decoratedParser.apply(p, new Field(name));
} }
private final String name; private final String name;
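Editor's note: named-object parsing follows suit: the NamedObjectParser context type becomes Void, leaving the three-argument lambda with an ignored middle slot. A minimal sketch around a hypothetical per-name value class.

    import org.elasticsearch.common.xcontent.ObjectParser.NamedObjectParser;
    import org.elasticsearch.common.xcontent.XContentParser;

    final class NamedParsingSketch {
        private NamedParsingSketch() {}

        static final class NamedValue {
            final String name;
            NamedValue(String name) {
                this.name = name;
            }
        }

        // The Void argument is unused; only the parser and the object's name matter.
        // A real parser would also read the object's fields from p, as Field.PARSER does above.
        static final NamedObjectParser<NamedValue, Void> PARSER =
                (XContentParser p, Void c, String name) -> new NamedValue(name);
    }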

View File

@ -25,8 +25,8 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.rescore.QueryRescorer.QueryRescoreContext; import org.elasticsearch.search.rescore.QueryRescorer.QueryRescoreContext;
@ -53,7 +53,7 @@ public class QueryRescorerBuilder extends RescoreBuilder<QueryRescorerBuilder> {
private static ParseField RESCORE_QUERY_WEIGHT_FIELD = new ParseField("rescore_query_weight"); private static ParseField RESCORE_QUERY_WEIGHT_FIELD = new ParseField("rescore_query_weight");
private static ParseField SCORE_MODE_FIELD = new ParseField("score_mode"); private static ParseField SCORE_MODE_FIELD = new ParseField("score_mode");
private static final ObjectParser<InnerBuilder, QueryParseContext> QUERY_RESCORE_PARSER = new ObjectParser<>(NAME, null); private static final ObjectParser<InnerBuilder, Void> QUERY_RESCORE_PARSER = new ObjectParser<>(NAME, null);
static { static {
QUERY_RESCORE_PARSER.declareObject(InnerBuilder::setQueryBuilder, (p, c) -> { QUERY_RESCORE_PARSER.declareObject(InnerBuilder::setQueryBuilder, (p, c) -> {
@ -161,8 +161,8 @@ public class QueryRescorerBuilder extends RescoreBuilder<QueryRescorerBuilder> {
builder.endObject(); builder.endObject();
} }
public static QueryRescorerBuilder fromXContent(QueryParseContext parseContext) throws IOException { public static QueryRescorerBuilder fromXContent(XContentParser parser) throws IOException {
InnerBuilder innerBuilder = QUERY_RESCORE_PARSER.parse(parseContext.parser(), new InnerBuilder(), parseContext); InnerBuilder innerBuilder = QUERY_RESCORE_PARSER.parse(parser, new InnerBuilder(), null);
return innerBuilder.build(); return innerBuilder.build();
} }

View File

@ -28,7 +28,6 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.rescore.QueryRescorer.QueryRescoreContext; import org.elasticsearch.search.rescore.QueryRescorer.QueryRescoreContext;
@ -75,8 +74,7 @@ public abstract class RescoreBuilder<RB extends RescoreBuilder<RB>> extends ToXC
return windowSize; return windowSize;
} }
public static RescoreBuilder<?> parseFromXContent(QueryParseContext parseContext) throws IOException { public static RescoreBuilder<?> parseFromXContent(XContentParser parser) throws IOException {
XContentParser parser = parseContext.parser();
String fieldName = null; String fieldName = null;
RescoreBuilder<?> rescorer = null; RescoreBuilder<?> rescorer = null;
Integer windowSize = null; Integer windowSize = null;
@ -93,7 +91,7 @@ public abstract class RescoreBuilder<RB extends RescoreBuilder<RB>> extends ToXC
} else if (token == XContentParser.Token.START_OBJECT) { } else if (token == XContentParser.Token.START_OBJECT) {
// we only have QueryRescorer at this point // we only have QueryRescorer at this point
if (QueryRescorerBuilder.NAME.equals(fieldName)) { if (QueryRescorerBuilder.NAME.equals(fieldName)) {
rescorer = QueryRescorerBuilder.fromXContent(parseContext); rescorer = QueryRescorerBuilder.fromXContent(parser);
} else { } else {
throw new ParsingException(parser.getTokenLocation(), "rescore doesn't support rescorer with name [" + fieldName + "]"); throw new ParsingException(parser.getTokenLocation(), "rescore doesn't support rescorer with name [" + fieldName + "]");
} }
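Editor's note: the outer rescore dispatcher keeps its hand-rolled token loop; it simply forwards the same parser instance into QueryRescorerBuilder.fromXContent once it has seen the rescorer's name. A generic sketch of that dispatch shape; all names below are illustrative.

    import java.io.IOException;
    import org.elasticsearch.common.ParsingException;
    import org.elasticsearch.common.xcontent.XContentParser;

    final class DispatchSketch {
        private DispatchSketch() {}

        interface SectionParser<T> {
            T parse(XContentParser parser) throws IOException;
        }

        // Reads { "<name>": { ... } } and hands the same parser, positioned on the inner
        // START_OBJECT, to the matching delegate - the shape parseFromXContent above uses
        // when it forwards to QueryRescorerBuilder.fromXContent(parser).
        static <T> T dispatch(XContentParser parser, String expectedName, SectionParser<T> delegate) throws IOException {
            String fieldName = null;
            XContentParser.Token token;
            while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                if (token == XContentParser.Token.FIELD_NAME) {
                    fieldName = parser.currentName();
                } else if (token == XContentParser.Token.START_OBJECT) {
                    if (expectedName.equals(fieldName)) {
                        return delegate.parse(parser);
                    }
                    throw new ParsingException(parser.getTokenLocation(), "unknown section [" + fieldName + "]");
                }
            }
            throw new ParsingException(parser.getTokenLocation(), "missing section [" + expectedName + "]");
        }
    }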

View File

@ -30,13 +30,12 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexNumericFieldData; import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.mapper.IdFieldMapper; import org.elasticsearch.index.mapper.IdFieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.UidFieldMapper; import org.elasticsearch.index.mapper.UidFieldMapper;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.QueryShardContext;
import java.io.IOException; import java.io.IOException;
@ -57,7 +56,7 @@ public class SliceBuilder extends ToXContentToBytes implements Writeable {
public static final ParseField FIELD_FIELD = new ParseField("field"); public static final ParseField FIELD_FIELD = new ParseField("field");
public static final ParseField ID_FIELD = new ParseField("id"); public static final ParseField ID_FIELD = new ParseField("id");
public static final ParseField MAX_FIELD = new ParseField("max"); public static final ParseField MAX_FIELD = new ParseField("max");
private static final ObjectParser<SliceBuilder, QueryParseContext> PARSER = private static final ObjectParser<SliceBuilder, Void> PARSER =
new ObjectParser<>("slice", SliceBuilder::new); new ObjectParser<>("slice", SliceBuilder::new);
static { static {
@ -169,8 +168,8 @@ public class SliceBuilder extends ToXContentToBytes implements Writeable {
builder.field(MAX_FIELD.getPreferredName(), max); builder.field(MAX_FIELD.getPreferredName(), max);
} }
public static SliceBuilder fromXContent(QueryParseContext context) throws IOException { public static SliceBuilder fromXContent(XContentParser parser) throws IOException {
SliceBuilder builder = PARSER.parse(context.parser(), new SliceBuilder(), context); SliceBuilder builder = PARSER.parse(parser, new SliceBuilder(), null);
return builder; return builder;
} }

View File

@ -32,7 +32,6 @@ import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.N
import org.elasticsearch.index.fielddata.IndexNumericFieldData; import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.QueryShardException; import org.elasticsearch.index.query.QueryShardException;
import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.DocValueFormat;
@ -313,7 +312,7 @@ public class FieldSortBuilder extends SortBuilder<FieldSortBuilder> {
} }
/** /**
* Creates a new {@link FieldSortBuilder} from the query held by the {@link QueryParseContext} in * Creates a new {@link FieldSortBuilder} from the query held by the {@link XContentParser} in
* {@link org.elasticsearch.common.xcontent.XContent} format. * {@link org.elasticsearch.common.xcontent.XContent} format.
* *
* @param parser the input parser. The state on the parser contained in this context will be changed as a side effect of this * @param parser the input parser. The state on the parser contained in this context will be changed as a side effect of this
@ -325,7 +324,7 @@ public class FieldSortBuilder extends SortBuilder<FieldSortBuilder> {
return PARSER.parse(parser, new FieldSortBuilder(fieldName), null); return PARSER.parse(parser, new FieldSortBuilder(fieldName), null);
} }
private static ObjectParser<FieldSortBuilder, QueryParseContext> PARSER = new ObjectParser<>(NAME); private static ObjectParser<FieldSortBuilder, Void> PARSER = new ObjectParser<>(NAME);
static { static {
PARSER.declareField(FieldSortBuilder::missing, p -> p.objectText(), MISSING, ValueType.VALUE); PARSER.declareField(FieldSortBuilder::missing, p -> p.objectText(), MISSING, ValueType.VALUE);

View File

@ -48,7 +48,6 @@ import org.elasticsearch.index.fielddata.plain.AbstractLatLonPointDVIndexFieldDa
import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.query.GeoValidationMethod; import org.elasticsearch.index.query.GeoValidationMethod;
import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.MultiValueMode; import org.elasticsearch.search.MultiValueMode;
@ -384,7 +383,7 @@ public class GeoDistanceSortBuilder extends SortBuilder<GeoDistanceSortBuilder>
} }
/** /**
* Creates a new {@link GeoDistanceSortBuilder} from the query held by the {@link QueryParseContext} in * Creates a new {@link GeoDistanceSortBuilder} from the query held by the {@link XContentParser} in
* {@link org.elasticsearch.common.xcontent.XContent} format. * {@link org.elasticsearch.common.xcontent.XContent} format.
* *
* @param parser the input parser. The state on the parser contained in this context will be changed as a * @param parser the input parser. The state on the parser contained in this context will be changed as a

View File

@ -25,7 +25,6 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.DocValueFormat;
@ -74,7 +73,7 @@ public class ScoreSortBuilder extends SortBuilder<ScoreSortBuilder> {
} }
/** /**
* Creates a new {@link ScoreSortBuilder} from the query held by the {@link QueryParseContext} in * Creates a new {@link ScoreSortBuilder} from the query held by the {@link XContentParser} in
* {@link org.elasticsearch.common.xcontent.XContent} format. * {@link org.elasticsearch.common.xcontent.XContent} format.
* *
* @param parser the input parser. The state on the parser contained in this context will be changed as a side effect of this * @param parser the input parser. The state on the parser contained in this context will be changed as a side effect of this
@ -82,11 +81,11 @@ public class ScoreSortBuilder extends SortBuilder<ScoreSortBuilder> {
* @param fieldName in some sort syntax variations the field name precedes the xContent object that specifies further parameters, e.g. * @param fieldName in some sort syntax variations the field name precedes the xContent object that specifies further parameters, e.g.
* in '{ "foo": { "order" : "asc"} }'. When parsing the inner object, the field name can be passed in via this argument * in '{ "foo": { "order" : "asc"} }'. When parsing the inner object, the field name can be passed in via this argument
*/ */
public static ScoreSortBuilder fromXContent(XContentParser parser, String fieldName) throws IOException { public static ScoreSortBuilder fromXContent(XContentParser parser, String fieldName) {
return PARSER.apply(parser, null); return PARSER.apply(parser, null);
} }
private static ObjectParser<ScoreSortBuilder, QueryParseContext> PARSER = new ObjectParser<>(NAME, ScoreSortBuilder::new); private static ObjectParser<ScoreSortBuilder, Void> PARSER = new ObjectParser<>(NAME, ScoreSortBuilder::new);
static { static {
PARSER.declareString((builder, order) -> builder.order(SortOrder.fromString(order)), ORDER_FIELD); PARSER.declareString((builder, order) -> builder.order(SortOrder.fromString(order)), ORDER_FIELD);
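Editor's note: the sort builders above can also drop their "throws IOException" clause because they go through ObjectParser.apply rather than ObjectParser.parse; apply is the BiFunction entry point and cannot declare a checked exception. A side-by-side sketch with a hypothetical target class.

    import java.io.IOException;
    import org.elasticsearch.common.xcontent.ObjectParser;
    import org.elasticsearch.common.xcontent.XContentParser;

    class ExampleSort {
        private static final ObjectParser<ExampleSort, Void> PARSER =
                new ObjectParser<>("example_sort", ExampleSort::new);

        static ExampleSort fromXContentChecked(XContentParser parser) throws IOException {
            return PARSER.parse(parser, new ExampleSort(), null);   // parse declares IOException
        }

        static ExampleSort fromXContent(XContentParser parser) {
            return PARSER.apply(parser, null);                      // apply wraps failures unchecked
        }
    }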

View File

@ -43,7 +43,6 @@ import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
import org.elasticsearch.index.fielddata.fieldcomparator.BytesRefFieldComparatorSource; import org.elasticsearch.index.fielddata.fieldcomparator.BytesRefFieldComparatorSource;
import org.elasticsearch.index.fielddata.fieldcomparator.DoubleValuesComparatorSource; import org.elasticsearch.index.fielddata.fieldcomparator.DoubleValuesComparatorSource;
import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.QueryShardException; import org.elasticsearch.index.query.QueryShardException;
import org.elasticsearch.script.Script; import org.elasticsearch.script.Script;
@ -212,7 +211,7 @@ public class ScriptSortBuilder extends SortBuilder<ScriptSortBuilder> {
return builder; return builder;
} }
private static ConstructingObjectParser<ScriptSortBuilder, QueryParseContext> PARSER = new ConstructingObjectParser<>(NAME, private static ConstructingObjectParser<ScriptSortBuilder, Void> PARSER = new ConstructingObjectParser<>(NAME,
a -> new ScriptSortBuilder((Script) a[0], (ScriptSortType) a[1])); a -> new ScriptSortBuilder((Script) a[0], (ScriptSortType) a[1]));
static { static {
@ -226,7 +225,7 @@ public class ScriptSortBuilder extends SortBuilder<ScriptSortBuilder> {
} }
/** /**
* Creates a new {@link ScriptSortBuilder} from the query held by the {@link QueryParseContext} in * Creates a new {@link ScriptSortBuilder} from the query held by the {@link XContentParser} in
* {@link org.elasticsearch.common.xcontent.XContent} format. * {@link org.elasticsearch.common.xcontent.XContent} format.
* *
* @param parser the input parser. The state on the parser contained in this context will be changed as a side effect of this * @param parser the input parser. The state on the parser contained in this context will be changed as a side effect of this
@ -234,7 +233,7 @@ public class ScriptSortBuilder extends SortBuilder<ScriptSortBuilder> {
* @param elementName in some sort syntax variations the field name precedes the xContent object that specifies further parameters, e.g. * @param elementName in some sort syntax variations the field name precedes the xContent object that specifies further parameters, e.g.
* in '{ "foo": { "order" : "asc"} }'. When parsing the inner object, the field name can be passed in via this argument * in '{ "foo": { "order" : "asc"} }'. When parsing the inner object, the field name can be passed in via this argument
*/ */
public static ScriptSortBuilder fromXContent(XContentParser parser, String elementName) throws IOException { public static ScriptSortBuilder fromXContent(XContentParser parser, String elementName) {
return PARSER.apply(parser, null); return PARSER.apply(parser, null);
} }
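Editor's note: ConstructingObjectParser gets the same Void treatment as ObjectParser, as in the ScriptSortBuilder hunk above. A compact sketch with a hypothetical value class; only ConstructingObjectParser and ParseField are real types here.

    import org.elasticsearch.common.ParseField;
    import org.elasticsearch.common.xcontent.ConstructingObjectParser;
    import org.elasticsearch.common.xcontent.XContentParser;

    class ExampleScripted {
        private static final ConstructingObjectParser<ExampleScripted, Void> PARSER =
                new ConstructingObjectParser<>("example_scripted", a -> new ExampleScripted((String) a[0]));
        static {
            PARSER.declareString(ConstructingObjectParser.constructorArg(), new ParseField("name"));
        }

        private final String name;

        ExampleScripted(String name) {
            this.name = name;
        }

        static ExampleScripted fromXContent(XContentParser parser) {
            return PARSER.apply(parser, null); // Void context, so callers pass null
        }
    }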

View File

@ -276,7 +276,7 @@ public class CompletionSuggestionBuilder extends SuggestionBuilder<CompletionSug
if (currentToken == XContentParser.Token.FIELD_NAME) { if (currentToken == XContentParser.Token.FIELD_NAME) {
currentFieldName = contextParser.currentName(); currentFieldName = contextParser.currentName();
final ContextMapping mapping = contextMappings.get(currentFieldName); final ContextMapping mapping = contextMappings.get(currentFieldName);
queryContexts.put(currentFieldName, mapping.parseQueryContext(context.newParseContext(contextParser))); queryContexts.put(currentFieldName, mapping.parseQueryContext(contextParser));
} }
} }
suggestionContext.setQueryContexts(queryContexts); suggestionContext.setQueryContexts(queryContexts);

View File

@ -28,7 +28,6 @@ import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.index.mapper.KeywordFieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper;
import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.ParseContext.Document; import org.elasticsearch.index.mapper.ParseContext.Document;
import org.elasticsearch.index.query.QueryParseContext;
import java.io.IOException; import java.io.IOException;
import java.util.ArrayList; import java.util.ArrayList;
@ -148,8 +147,8 @@ public class CategoryContextMapping extends ContextMapping<CategoryQueryContext>
} }
@Override @Override
protected CategoryQueryContext fromXContent(QueryParseContext context) throws IOException { protected CategoryQueryContext fromXContent(XContentParser parser) throws IOException {
return CategoryQueryContext.fromXContent(context); return CategoryQueryContext.fromXContent(parser);
} }
/** /**

View File

@ -26,7 +26,6 @@ import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParseContext;
import java.io.IOException; import java.io.IOException;
import java.util.Objects; import java.util.Objects;
@ -105,8 +104,7 @@ public final class CategoryQueryContext implements ToXContent {
CATEGORY_PARSER.declareBoolean(Builder::setPrefix, new ParseField(CONTEXT_PREFIX)); CATEGORY_PARSER.declareBoolean(Builder::setPrefix, new ParseField(CONTEXT_PREFIX));
} }
public static CategoryQueryContext fromXContent(QueryParseContext context) throws IOException { public static CategoryQueryContext fromXContent(XContentParser parser) throws IOException {
XContentParser parser = context.parser();
XContentParser.Token token = parser.currentToken(); XContentParser.Token token = parser.currentToken();
Builder builder = builder(); Builder builder = builder();
if (token == XContentParser.Token.START_OBJECT) { if (token == XContentParser.Token.START_OBJECT) {
@ -114,7 +112,7 @@ public final class CategoryQueryContext implements ToXContent {
CATEGORY_PARSER.parse(parser, builder, null); CATEGORY_PARSER.parse(parser, builder, null);
} catch(ParsingException e) { } catch(ParsingException e) {
throw new ElasticsearchParseException("category context must be a string, number or boolean"); throw new ElasticsearchParseException("category context must be a string, number or boolean");
} }
} else if (token == XContentParser.Token.VALUE_STRING || token == XContentParser.Token.VALUE_BOOLEAN } else if (token == XContentParser.Token.VALUE_STRING || token == XContentParser.Token.VALUE_BOOLEAN
|| token == XContentParser.Token.VALUE_NUMBER) { || token == XContentParser.Token.VALUE_NUMBER) {
builder.setCategory(parser.text()); builder.setCategory(parser.text());

View File

@ -27,7 +27,6 @@ import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.mapper.CompletionFieldMapper; import org.elasticsearch.index.mapper.CompletionFieldMapper;
import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.query.QueryParseContext;
import java.io.IOException; import java.io.IOException;
import java.util.ArrayList; import java.util.ArrayList;
@ -100,23 +99,22 @@ public abstract class ContextMapping<T extends ToXContent> implements ToXContent
/** /**
* Prototype for the query context * Prototype for the query context
*/ */
protected abstract T fromXContent(QueryParseContext context) throws IOException; protected abstract T fromXContent(XContentParser parser) throws IOException;
/** /**
* Parses query contexts for this mapper * Parses query contexts for this mapper
*/ */
public final List<InternalQueryContext> parseQueryContext(QueryParseContext context) throws IOException, ElasticsearchParseException { public final List<InternalQueryContext> parseQueryContext(XContentParser parser) throws IOException, ElasticsearchParseException {
List<T> queryContexts = new ArrayList<>(); List<T> queryContexts = new ArrayList<>();
XContentParser parser = context.parser();
Token token = parser.nextToken(); Token token = parser.nextToken();
if (token == Token.START_ARRAY) { if (token == Token.START_ARRAY) {
while (parser.nextToken() != Token.END_ARRAY) { while (parser.nextToken() != Token.END_ARRAY) {
queryContexts.add(fromXContent(context)); queryContexts.add(fromXContent(parser));
} }
} else { } else {
queryContexts.add(fromXContent(context)); queryContexts.add(fromXContent(parser));
} }
return toInternalQueryContexts(queryContexts); return toInternalQueryContexts(queryContexts);
} }
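Editor's note: for the completion suggester contexts, parseQueryContext now takes the raw contexts parser, which is what CompletionSuggestionBuilder hands over a few hunks earlier. A usage sketch; the helper method is illustrative and the import path is from memory.

    import java.io.IOException;
    import java.util.List;
    import org.elasticsearch.common.xcontent.XContentParser;
    import org.elasticsearch.search.suggest.completion.context.ContextMapping;

    final class SuggestContextSketch {
        private SuggestContextSketch() {}

        static List<?> parseContexts(ContextMapping<?> mapping, XContentParser contextParser) throws IOException {
            // Before: mapping.parseQueryContext(context.newParseContext(contextParser))
            return mapping.parseQueryContext(contextParser);
        }
    }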

View File

@ -29,11 +29,10 @@ import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParser.Token; import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.index.mapper.GeoPointFieldMapper;
import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.GeoPointFieldMapper;
import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.ParseContext.Document; import org.elasticsearch.index.mapper.ParseContext.Document;
import org.elasticsearch.index.query.QueryParseContext;
import java.io.IOException; import java.io.IOException;
import java.util.ArrayList; import java.util.ArrayList;
@ -225,8 +224,8 @@ public class GeoContextMapping extends ContextMapping<GeoQueryContext> {
} }
@Override @Override
protected GeoQueryContext fromXContent(QueryParseContext context) throws IOException { protected GeoQueryContext fromXContent(XContentParser parser) throws IOException {
return GeoQueryContext.fromXContent(context); return GeoQueryContext.fromXContent(parser);
} }
/** /**

View File

@ -27,7 +27,6 @@ import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParseContext;
import java.io.IOException; import java.io.IOException;
import java.util.Collections; import java.util.Collections;
@ -124,8 +123,7 @@ public final class GeoQueryContext implements ToXContent {
GEO_CONTEXT_PARSER.declareDouble(GeoQueryContext.Builder::setLon, new ParseField("lon")); GEO_CONTEXT_PARSER.declareDouble(GeoQueryContext.Builder::setLon, new ParseField("lon"));
} }
public static GeoQueryContext fromXContent(QueryParseContext context) throws IOException { public static GeoQueryContext fromXContent(XContentParser parser) throws IOException {
XContentParser parser = context.parser();
XContentParser.Token token = parser.currentToken(); XContentParser.Token token = parser.currentToken();
GeoQueryContext.Builder builder = new Builder(); GeoQueryContext.Builder builder = new Builder();
if (token == XContentParser.Token.START_OBJECT) { if (token == XContentParser.Token.START_OBJECT) {

View File

@ -20,17 +20,13 @@
package org.elasticsearch.index.query; package org.elasticsearch.index.query;
import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.search.SearchModule; import org.elasticsearch.search.SearchModule;
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase;
import org.junit.After;
import org.junit.AfterClass; import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass; import org.junit.BeforeClass;
import java.io.IOException; import java.io.IOException;
@ -38,7 +34,8 @@ import java.io.IOException;
import static java.util.Collections.emptyList; import static java.util.Collections.emptyList;
import static org.elasticsearch.index.query.AbstractQueryBuilder.parseInnerQueryBuilder; import static org.elasticsearch.index.query.AbstractQueryBuilder.parseInnerQueryBuilder;
public class QueryParseContextTests extends ESTestCase { public class AbstractQueryBuilderTests extends ESTestCase {
private static NamedXContentRegistry xContentRegistry; private static NamedXContentRegistry xContentRegistry;
@BeforeClass @BeforeClass
@ -51,20 +48,6 @@ public class QueryParseContextTests extends ESTestCase {
xContentRegistry = null; xContentRegistry = null;
} }
private ThreadContext threadContext;
@Before
public void beforeTest() throws IOException {
this.threadContext = new ThreadContext(Settings.EMPTY);
DeprecationLogger.setThreadContext(threadContext);
}
@After
public void teardown() throws IOException {
DeprecationLogger.removeThreadContext(this.threadContext);
this.threadContext.close();
}
public void testParseInnerQueryBuilder() throws IOException { public void testParseInnerQueryBuilder() throws IOException {
QueryBuilder query = new MatchQueryBuilder("foo", "bar"); QueryBuilder query = new MatchQueryBuilder("foo", "bar");
String source = query.toString(); String source = query.toString();
@ -91,7 +74,6 @@ public class QueryParseContextTests extends ESTestCase {
source = "{ \"foo\" : \"bar\" }"; source = "{ \"foo\" : \"bar\" }";
try (XContentParser parser = createParser(JsonXContent.jsonXContent, source)) { try (XContentParser parser = createParser(JsonXContent.jsonXContent, source)) {
QueryParseContext context = new QueryParseContext(parser);
ParsingException exception = expectThrows(ParsingException.class, () -> parseInnerQueryBuilder(parser)); ParsingException exception = expectThrows(ParsingException.class, () -> parseInnerQueryBuilder(parser));
assertEquals("[foo] query malformed, no start_object after query name", exception.getMessage()); assertEquals("[foo] query malformed, no start_object after query name", exception.getMessage());
} }
@ -107,4 +89,5 @@ public class QueryParseContextTests extends ESTestCase {
protected NamedXContentRegistry xContentRegistry() { protected NamedXContentRegistry xContentRegistry() {
return xContentRegistry; return xContentRegistry;
} }
} }
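Editor's note: on the test side the pattern collapses to building a parser with the ESTestCase helper and passing it straight to the method under test; no QueryParseContext construction remains. A sketch modeled on the renamed test above; the class name is illustrative, the expected message is the one the diff asserts.

    import java.io.IOException;
    import org.elasticsearch.common.ParsingException;
    import org.elasticsearch.common.xcontent.XContentParser;
    import org.elasticsearch.common.xcontent.json.JsonXContent;
    import org.elasticsearch.test.ESTestCase;

    import static org.elasticsearch.index.query.AbstractQueryBuilder.parseInnerQueryBuilder;

    public class ParseInnerQueryBuilderSketchTests extends ESTestCase {
        public void testRejectsMalformedQuery() throws IOException {
            String source = "{ \"foo\" : \"bar\" }";
            try (XContentParser parser = createParser(JsonXContent.jsonXContent, source)) {
                ParsingException e = expectThrows(ParsingException.class, () -> parseInnerQueryBuilder(parser));
                assertEquals("[foo] query malformed, no start_object after query name", e.getMessage());
            }
        }
    }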

View File

@ -224,7 +224,7 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase<RangeQueryBuil
/** /**
* Specifying a timezone together with a numeric range query should throw an exception. * Specifying a timezone together with a numeric range query should throw an exception.
*/ */
public void testToQueryNonDateWithTimezone() throws QueryShardException, IOException { public void testToQueryNonDateWithTimezone() throws QueryShardException {
RangeQueryBuilder query = new RangeQueryBuilder(INT_FIELD_NAME); RangeQueryBuilder query = new RangeQueryBuilder(INT_FIELD_NAME);
query.from(1).to(10).timeZone("UTC"); query.from(1).to(10).timeZone("UTC");
QueryShardException e = expectThrows(QueryShardException.class, () -> query.toQuery(createShardContext())); QueryShardException e = expectThrows(QueryShardException.class, () -> query.toQuery(createShardContext()));
@ -234,7 +234,7 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase<RangeQueryBuil
/** /**
* Specifying a timezone together with an unmapped field should throw an exception. * Specifying a timezone together with an unmapped field should throw an exception.
*/ */
public void testToQueryUnmappedWithTimezone() throws QueryShardException, IOException { public void testToQueryUnmappedWithTimezone() throws QueryShardException {
RangeQueryBuilder query = new RangeQueryBuilder("bogus_field"); RangeQueryBuilder query = new RangeQueryBuilder("bogus_field");
query.from(1).to(10).timeZone("UTC"); query.from(1).to(10).timeZone("UTC");
QueryShardException e = expectThrows(QueryShardException.class, () -> query.toQuery(createShardContext())); QueryShardException e = expectThrows(QueryShardException.class, () -> query.toQuery(createShardContext()));
@ -410,7 +410,7 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase<RangeQueryBuil
String fieldName = randomAlphaOfLengthBetween(1, 20); String fieldName = randomAlphaOfLengthBetween(1, 20);
RangeQueryBuilder query = new RangeQueryBuilder(fieldName) { RangeQueryBuilder query = new RangeQueryBuilder(fieldName) {
@Override @Override
protected MappedFieldType.Relation getRelation(QueryRewriteContext queryRewriteContext) throws IOException { protected MappedFieldType.Relation getRelation(QueryRewriteContext queryRewriteContext) {
return Relation.WITHIN; return Relation.WITHIN;
} }
}; };
@ -431,7 +431,7 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase<RangeQueryBuil
String fieldName = randomAlphaOfLengthBetween(1, 20); String fieldName = randomAlphaOfLengthBetween(1, 20);
RangeQueryBuilder query = new RangeQueryBuilder(fieldName) { RangeQueryBuilder query = new RangeQueryBuilder(fieldName) {
@Override @Override
protected MappedFieldType.Relation getRelation(QueryRewriteContext queryRewriteContext) throws IOException { protected MappedFieldType.Relation getRelation(QueryRewriteContext queryRewriteContext) {
return Relation.WITHIN; return Relation.WITHIN;
} }
}; };
@ -456,7 +456,7 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase<RangeQueryBuil
String fieldName = randomAlphaOfLengthBetween(1, 20); String fieldName = randomAlphaOfLengthBetween(1, 20);
RangeQueryBuilder query = new RangeQueryBuilder(fieldName) { RangeQueryBuilder query = new RangeQueryBuilder(fieldName) {
@Override @Override
protected MappedFieldType.Relation getRelation(QueryRewriteContext queryRewriteContext) throws IOException { protected MappedFieldType.Relation getRelation(QueryRewriteContext queryRewriteContext) {
return Relation.DISJOINT; return Relation.DISJOINT;
} }
}; };
@ -473,7 +473,7 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase<RangeQueryBuil
String fieldName = randomAlphaOfLengthBetween(1, 20); String fieldName = randomAlphaOfLengthBetween(1, 20);
RangeQueryBuilder query = new RangeQueryBuilder(fieldName) { RangeQueryBuilder query = new RangeQueryBuilder(fieldName) {
@Override @Override
protected MappedFieldType.Relation getRelation(QueryRewriteContext queryRewriteContext) throws IOException { protected MappedFieldType.Relation getRelation(QueryRewriteContext queryRewriteContext) {
return Relation.INTERSECTS; return Relation.INTERSECTS;
} }
}; };
@ -490,7 +490,7 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase<RangeQueryBuil
String fieldName = randomAlphaOfLengthBetween(1, 20); String fieldName = randomAlphaOfLengthBetween(1, 20);
RangeQueryBuilder query = new RangeQueryBuilder(fieldName) { RangeQueryBuilder query = new RangeQueryBuilder(fieldName) {
@Override @Override
protected MappedFieldType.Relation getRelation(QueryRewriteContext queryRewriteContext) throws IOException { protected MappedFieldType.Relation getRelation(QueryRewriteContext queryRewriteContext) {
return Relation.INTERSECTS; return Relation.INTERSECTS;
} }
}; };
@ -499,7 +499,7 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase<RangeQueryBuil
assertThat(rewritten, sameInstance(query)); assertThat(rewritten, sameInstance(query));
} }
public void testParseFailsWithMultipleFields() throws IOException { public void testParseFailsWithMultipleFields() {
String json = String json =
"{\n" + "{\n" +
" \"range\": {\n" + " \"range\": {\n" +
@ -517,7 +517,7 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase<RangeQueryBuil
assertEquals("[range] query doesn't support multiple fields, found [age] and [price]", e.getMessage()); assertEquals("[range] query doesn't support multiple fields, found [age] and [price]", e.getMessage());
} }
public void testParseFailsWithMultipleFieldsWhenOneIsDate() throws IOException { public void testParseFailsWithMultipleFieldsWhenOneIsDate() {
String json = String json =
"{\n" + "{\n" +
" \"range\": {\n" + " \"range\": {\n" +

View File

@ -24,8 +24,8 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.TermQueryBuilder; import org.elasticsearch.index.query.TermQueryBuilder;
import org.elasticsearch.index.query.functionscore.GaussDecayFunctionBuilder; import org.elasticsearch.index.query.functionscore.GaussDecayFunctionBuilder;
import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.plugins.SearchPlugin;
@ -91,6 +91,7 @@ public class SearchModuleTests extends ModuleTestCase {
() -> new SearchModule(Settings.EMPTY, false, singletonList(registersDupeHighlighter))); () -> new SearchModule(Settings.EMPTY, false, singletonList(registersDupeHighlighter)));
SearchPlugin registersDupeSuggester = new SearchPlugin() { SearchPlugin registersDupeSuggester = new SearchPlugin() {
@Override
public List<SearchPlugin.SuggesterSpec<?>> getSuggesters() { public List<SearchPlugin.SuggesterSpec<?>> getSuggesters() {
return singletonList(new SuggesterSpec<>("term", TermSuggestionBuilder::new, TermSuggestionBuilder::fromXContent)); return singletonList(new SuggesterSpec<>("term", TermSuggestionBuilder::new, TermSuggestionBuilder::fromXContent));
} }
@ -136,6 +137,7 @@ public class SearchModuleTests extends ModuleTestCase {
singletonList(registersDupeFetchSubPhase))); singletonList(registersDupeFetchSubPhase)));
SearchPlugin registersDupeQuery = new SearchPlugin() { SearchPlugin registersDupeQuery = new SearchPlugin() {
@Override
public List<SearchPlugin.QuerySpec<?>> getQueries() { public List<SearchPlugin.QuerySpec<?>> getQueries() {
return singletonList(new QuerySpec<>(TermQueryBuilder.NAME, TermQueryBuilder::new, TermQueryBuilder::fromXContent)); return singletonList(new QuerySpec<>(TermQueryBuilder.NAME, TermQueryBuilder::new, TermQueryBuilder::fromXContent));
} }
@ -144,6 +146,7 @@ public class SearchModuleTests extends ModuleTestCase {
new SearchModule(Settings.EMPTY, false, singletonList(registersDupeQuery)).getNamedXContents())); new SearchModule(Settings.EMPTY, false, singletonList(registersDupeQuery)).getNamedXContents()));
SearchPlugin registersDupeAggregation = new SearchPlugin() { SearchPlugin registersDupeAggregation = new SearchPlugin() {
@Override
public List<AggregationSpec> getAggregations() { public List<AggregationSpec> getAggregations() {
return singletonList(new AggregationSpec(TermsAggregationBuilder.NAME, TermsAggregationBuilder::new, return singletonList(new AggregationSpec(TermsAggregationBuilder.NAME, TermsAggregationBuilder::new,
TermsAggregationBuilder::parse)); TermsAggregationBuilder::parse));
@ -153,6 +156,7 @@ public class SearchModuleTests extends ModuleTestCase {
singletonList(registersDupeAggregation)).getNamedXContents())); singletonList(registersDupeAggregation)).getNamedXContents()));
SearchPlugin registersDupePipelineAggregation = new SearchPlugin() { SearchPlugin registersDupePipelineAggregation = new SearchPlugin() {
@Override
public List<PipelineAggregationSpec> getPipelineAggregations() { public List<PipelineAggregationSpec> getPipelineAggregations() {
return singletonList(new PipelineAggregationSpec( return singletonList(new PipelineAggregationSpec(
DerivativePipelineAggregationBuilder.NAME, DerivativePipelineAggregationBuilder.NAME,
@ -229,6 +233,7 @@ public class SearchModuleTests extends ModuleTestCase {
public void testRegisterAggregation() { public void testRegisterAggregation() {
SearchModule module = new SearchModule(Settings.EMPTY, false, singletonList(new SearchPlugin() { SearchModule module = new SearchModule(Settings.EMPTY, false, singletonList(new SearchPlugin() {
@Override
public List<AggregationSpec> getAggregations() { public List<AggregationSpec> getAggregations() {
return singletonList(new AggregationSpec("test", TestAggregationBuilder::new, TestAggregationBuilder::fromXContent)); return singletonList(new AggregationSpec("test", TestAggregationBuilder::new, TestAggregationBuilder::fromXContent));
} }
@ -243,6 +248,7 @@ public class SearchModuleTests extends ModuleTestCase {
public void testRegisterPipelineAggregation() { public void testRegisterPipelineAggregation() {
SearchModule module = new SearchModule(Settings.EMPTY, false, singletonList(new SearchPlugin() { SearchModule module = new SearchModule(Settings.EMPTY, false, singletonList(new SearchPlugin() {
@Override
public List<PipelineAggregationSpec> getPipelineAggregations() { public List<PipelineAggregationSpec> getPipelineAggregations() {
return singletonList(new PipelineAggregationSpec("test", return singletonList(new PipelineAggregationSpec("test",
TestPipelineAggregationBuilder::new, TestPipelineAggregator::new, TestPipelineAggregationBuilder::fromXContent)); TestPipelineAggregationBuilder::new, TestPipelineAggregator::new, TestPipelineAggregationBuilder::fromXContent));
@ -344,7 +350,7 @@ public class SearchModuleTests extends ModuleTestCase {
return false; return false;
} }
private static TestAggregationBuilder fromXContent(String name, QueryParseContext c) { private static TestAggregationBuilder fromXContent(String name, XContentParser p) {
return null; return null;
} }
} }
@ -389,7 +395,7 @@ public class SearchModuleTests extends ModuleTestCase {
return false; return false;
} }
private static TestPipelineAggregationBuilder fromXContent(String name, QueryParseContext c) { private static TestPipelineAggregationBuilder fromXContent(String name, XContentParser p) {
return null; return null;
} }
} }

View File

@ -19,6 +19,7 @@
package org.elasticsearch.search; package org.elasticsearch.search;
import com.carrotsearch.hppc.IntArrayList; import com.carrotsearch.hppc.IntArrayList;
import org.apache.lucene.search.Query; import org.apache.lucene.search.Query;
import org.apache.lucene.store.AlreadyClosedException; import org.apache.lucene.store.AlreadyClosedException;
import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionListener;
@ -272,7 +273,7 @@ public class SearchServiceTests extends ESSingleNodeTestCase {
} }
@Override @Override
protected QueryBuilder doRewrite(QueryRewriteContext queryShardContext) throws IOException { protected QueryBuilder doRewrite(QueryRewriteContext queryShardContext) {
throw new IllegalStateException("Fail on rewrite phase"); throw new IllegalStateException("Fail on rewrite phase");
} }

View File

@ -22,7 +22,6 @@ package org.elasticsearch.search.aggregations;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.IndexService; import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.ESSingleNodeTestCase;
@ -58,10 +57,9 @@ public class AggregationCollectorTests extends ESSingleNodeTestCase {
private boolean needsScores(IndexService index, String agg) throws IOException { private boolean needsScores(IndexService index, String agg) throws IOException {
XContentParser aggParser = createParser(JsonXContent.jsonXContent, agg); XContentParser aggParser = createParser(JsonXContent.jsonXContent, agg);
QueryParseContext parseContext = new QueryParseContext(aggParser);
aggParser.nextToken(); aggParser.nextToken();
SearchContext context = createSearchContext(index); SearchContext context = createSearchContext(index);
final AggregatorFactories factories = AggregatorFactories.parseAggregators(parseContext).build(context, null); final AggregatorFactories factories = AggregatorFactories.parseAggregators(aggParser).build(context, null);
final Aggregator[] aggregators = factories.createTopLevelAggregators(); final Aggregator[] aggregators = factories.createTopLevelAggregators();
assertEquals(1, aggregators.length); assertEquals(1, aggregators.length);
return aggregators[0].needsScores(); return aggregators[0].needsScores();
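Editor's note: the aggregation tests show the same shape at a different entry point: AggregatorFactories.parseAggregators now takes the parser itself. A small helper sketch under the same positioning assumption as the test above (one nextToken() to land on the opening START_OBJECT); the wrapper class is illustrative.

    import java.io.IOException;
    import org.elasticsearch.common.xcontent.XContentParser;
    import org.elasticsearch.search.aggregations.AggregatorFactories;

    final class AggParsingSketch {
        private AggParsingSketch() {}

        static AggregatorFactories.Builder parseAggs(XContentParser parser) throws IOException {
            parser.nextToken(); // position on START_OBJECT, as the test above does
            return AggregatorFactories.parseAggregators(parser);
        }
    }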

View File

@ -26,8 +26,6 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.env.Environment; import org.elasticsearch.env.Environment;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.SearchModule; import org.elasticsearch.search.SearchModule;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders;
import org.elasticsearch.test.AbstractQueryTestCase; import org.elasticsearch.test.AbstractQueryTestCase;
@ -96,9 +94,8 @@ public class AggregatorFactoriesTests extends ESTestCase {
.endObject() .endObject()
.endObject(); .endObject();
XContentParser parser = createParser(source); XContentParser parser = createParser(source);
QueryParseContext parseContext = new QueryParseContext(parser);
assertSame(XContentParser.Token.START_OBJECT, parser.nextToken()); assertSame(XContentParser.Token.START_OBJECT, parser.nextToken());
Exception e = expectThrows(ParsingException.class, () -> AggregatorFactories.parseAggregators(parseContext)); Exception e = expectThrows(ParsingException.class, () -> AggregatorFactories.parseAggregators(parser));
assertThat(e.toString(), containsString("Found two aggregation type definitions in [in_stock]: [filter] and [terms]")); assertThat(e.toString(), containsString("Found two aggregation type definitions in [in_stock]: [filter] and [terms]"));
} }
@ -129,9 +126,8 @@ public class AggregatorFactoriesTests extends ESTestCase {
.endObject() .endObject()
.endObject(); .endObject();
XContentParser parser = createParser(source); XContentParser parser = createParser(source);
QueryParseContext parseContext = new QueryParseContext(parser);
assertSame(XContentParser.Token.START_OBJECT, parser.nextToken()); assertSame(XContentParser.Token.START_OBJECT, parser.nextToken());
Exception e = expectThrows(ParsingException.class, () -> AggregatorFactories.parseAggregators(parseContext)); Exception e = expectThrows(ParsingException.class, () -> AggregatorFactories.parseAggregators(parser));
assertThat(e.toString(), containsString("Found two sub aggregation definitions under [by_date]")); assertThat(e.toString(), containsString("Found two sub aggregation definitions under [by_date]"));
} }
@ -164,9 +160,8 @@ public class AggregatorFactoriesTests extends ESTestCase {
.endObject() .endObject()
.endObject(); .endObject();
XContentParser parser = createParser(source); XContentParser parser = createParser(source);
QueryParseContext parseContext = new QueryParseContext(parser);
assertSame(XContentParser.Token.START_OBJECT, parser.nextToken()); assertSame(XContentParser.Token.START_OBJECT, parser.nextToken());
Exception e = expectThrows(ParsingException.class, () -> AggregatorFactories.parseAggregators(parseContext)); Exception e = expectThrows(ParsingException.class, () -> AggregatorFactories.parseAggregators(parser));
assertThat(e.toString(), containsString("Invalid aggregation name [" + name + "]")); assertThat(e.toString(), containsString("Invalid aggregation name [" + name + "]"));
} }
@ -188,9 +183,8 @@ public class AggregatorFactoriesTests extends ESTestCase {
.endObject() .endObject()
.endObject(); .endObject();
XContentParser parser = createParser(source); XContentParser parser = createParser(source);
QueryParseContext parseContext = new QueryParseContext(parser);
assertSame(XContentParser.Token.START_OBJECT, parser.nextToken()); assertSame(XContentParser.Token.START_OBJECT, parser.nextToken());
Exception e = expectThrows(ParsingException.class, () -> AggregatorFactories.parseAggregators(parseContext)); Exception e = expectThrows(ParsingException.class, () -> AggregatorFactories.parseAggregators(parser));
assertThat(e.toString(), containsString("Two sibling aggregations cannot have the same name: [" + name + "]")); assertThat(e.toString(), containsString("Two sibling aggregations cannot have the same name: [" + name + "]"));
} }
@ -213,9 +207,8 @@ public class AggregatorFactoriesTests extends ESTestCase {
.endObject() .endObject()
.endObject(); .endObject();
XContentParser parser = createParser(source); XContentParser parser = createParser(source);
QueryParseContext parseContext = new QueryParseContext(parser);
assertSame(XContentParser.Token.START_OBJECT, parser.nextToken()); assertSame(XContentParser.Token.START_OBJECT, parser.nextToken());
Exception e = expectThrows(ParsingException.class, () -> AggregatorFactories.parseAggregators(parseContext)); Exception e = expectThrows(ParsingException.class, () -> AggregatorFactories.parseAggregators(parser));
assertThat(e.toString(), containsString("Expected [START_OBJECT] under [field], but got a [VALUE_STRING] in [cardinality]")); assertThat(e.toString(), containsString("Expected [START_OBJECT] under [field], but got a [VALUE_STRING] in [cardinality]"));
} }
@ -238,9 +231,8 @@ public class AggregatorFactoriesTests extends ESTestCase {
.endObject() .endObject()
.endObject(); .endObject();
XContentParser parser = createParser(source); XContentParser parser = createParser(source);
QueryParseContext parseContext = new QueryParseContext(parser);
assertSame(XContentParser.Token.START_OBJECT, parser.nextToken()); assertSame(XContentParser.Token.START_OBJECT, parser.nextToken());
Exception e = expectThrows(ParsingException.class, () -> AggregatorFactories.parseAggregators(parseContext)); Exception e = expectThrows(ParsingException.class, () -> AggregatorFactories.parseAggregators(parser));
assertThat(e.toString(), containsString("Expected [START_OBJECT] under [field], but got a [VALUE_STRING] in [tag_count]")); assertThat(e.toString(), containsString("Expected [START_OBJECT] under [field], but got a [VALUE_STRING] in [tag_count]"));
} }
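As the hunks above show, aggregation parsing in these tests now starts from a bare XContentParser. A minimal sketch of the new call pattern, assuming the ESTestCase helpers (createParser, JsonXContent) that the tests already use; the aggregation JSON here is a made-up example, not taken from the diff:

    // Sketch only: parse an aggregation tree directly from an XContentParser,
    // with no QueryParseContext wrapper (API names as shown in the diff).
    XContentParser parser = createParser(JsonXContent.jsonXContent,
            "{\"tags\": {\"terms\": {\"field\": \"tag\"}}}");
    assertSame(XContentParser.Token.START_OBJECT, parser.nextToken());
    AggregatorFactories.Builder parsed = AggregatorFactories.parseAggregators(parser);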

View File

@ -32,7 +32,6 @@ import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.env.Environment; import org.elasticsearch.env.Environment;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.indices.IndicesModule; import org.elasticsearch.indices.IndicesModule;
import org.elasticsearch.search.SearchModule; import org.elasticsearch.search.SearchModule;
import org.elasticsearch.search.aggregations.pipeline.AbstractPipelineAggregationBuilder; import org.elasticsearch.search.aggregations.pipeline.AbstractPipelineAggregationBuilder;
@ -117,9 +116,8 @@ public abstract class BasePipelineAggregationTestCase<AF extends AbstractPipelin
} }
protected PipelineAggregationBuilder parse(XContentParser parser) throws IOException { protected PipelineAggregationBuilder parse(XContentParser parser) throws IOException {
QueryParseContext parseContext = new QueryParseContext(parser);
assertSame(XContentParser.Token.START_OBJECT, parser.nextToken()); assertSame(XContentParser.Token.START_OBJECT, parser.nextToken());
AggregatorFactories.Builder parsed = AggregatorFactories.parseAggregators(parseContext); AggregatorFactories.Builder parsed = AggregatorFactories.parseAggregators(parser);
assertThat(parsed.getAggregatorFactories(), hasSize(0)); assertThat(parsed.getAggregatorFactories(), hasSize(0));
assertThat(parsed.getPipelineAggregatorFactories(), hasSize(1)); assertThat(parsed.getPipelineAggregatorFactories(), hasSize(1));
PipelineAggregationBuilder newAgg = parsed.getPipelineAggregatorFactories().get(0); PipelineAggregationBuilder newAgg = parsed.getPipelineAggregatorFactories().get(0);

View File

@ -67,12 +67,12 @@ public class InternalOrderTests extends AbstractSerializingTestCase<BucketOrder>
protected BucketOrder doParseInstance(XContentParser parser) throws IOException { protected BucketOrder doParseInstance(XContentParser parser) throws IOException {
Token token = parser.nextToken(); Token token = parser.nextToken();
if (token == Token.START_OBJECT) { if (token == Token.START_OBJECT) {
return InternalOrder.Parser.parseOrderParam(parser, null); return InternalOrder.Parser.parseOrderParam(parser);
} }
if (token == Token.START_ARRAY) { if (token == Token.START_ARRAY) {
List<BucketOrder> orders = new ArrayList<>(); List<BucketOrder> orders = new ArrayList<>();
while (parser.nextToken() == Token.START_OBJECT) { while (parser.nextToken() == Token.START_OBJECT) {
orders.add(InternalOrder.Parser.parseOrderParam(parser, null)); orders.add(InternalOrder.Parser.parseOrderParam(parser));
} }
return BucketOrder.compound(orders); return BucketOrder.compound(orders);
} }

View File

@ -27,9 +27,9 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardException; import org.elasticsearch.index.query.QueryShardException;
import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.plugins.SearchPlugin;
@ -116,7 +116,7 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase {
.significanceHeuristic(new SimpleHeuristic()) .significanceHeuristic(new SimpleHeuristic())
.minDocCount(1) .minDocCount(1)
) )
); );
}else }else
{ {
request = client().prepareSearch(INDEX_NAME).setTypes(DOC_TYPE) request = client().prepareSearch(INDEX_NAME).setTypes(DOC_TYPE)
@ -128,9 +128,9 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase {
.significanceHeuristic(new SimpleHeuristic()) .significanceHeuristic(new SimpleHeuristic())
.minDocCount(1) .minDocCount(1)
) )
); );
} }
SearchResponse response = request.execute().actionGet(); SearchResponse response = request.execute().actionGet();
assertSearchResponse(response); assertSearchResponse(response);
StringTerms classes = response.getAggregations().get("class"); StringTerms classes = response.getAggregations().get("class");
@ -177,7 +177,7 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase {
@Override @Override
public List<SearchExtensionSpec<SignificanceHeuristic, SignificanceHeuristicParser>> getSignificanceHeuristics() { public List<SearchExtensionSpec<SignificanceHeuristic, SignificanceHeuristicParser>> getSignificanceHeuristics() {
return singletonList(new SearchExtensionSpec<SignificanceHeuristic, SignificanceHeuristicParser>(SimpleHeuristic.NAME, return singletonList(new SearchExtensionSpec<SignificanceHeuristic, SignificanceHeuristicParser>(SimpleHeuristic.NAME,
SimpleHeuristic::new, (context) -> SimpleHeuristic.parse(context))); SimpleHeuristic::new, (parser) -> SimpleHeuristic.parse(parser)));
} }
@Override @Override
@ -257,9 +257,9 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase {
return subsetFreq / subsetSize > supersetFreq / supersetSize ? 2.0 : 1.0; return subsetFreq / subsetSize > supersetFreq / supersetSize ? 2.0 : 1.0;
} }
public static SignificanceHeuristic parse(QueryParseContext context) public static SignificanceHeuristic parse(XContentParser parser)
throws IOException, QueryShardException { throws IOException, QueryShardException {
context.parser().nextToken(); parser.nextToken();
return new SimpleHeuristic(); return new SimpleHeuristic();
} }
} }
@ -268,23 +268,23 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase {
String type = randomBoolean() ? "text" : "long"; String type = randomBoolean() ? "text" : "long";
String settings = "{\"index.number_of_shards\": 1, \"index.number_of_replicas\": 0}"; String settings = "{\"index.number_of_shards\": 1, \"index.number_of_replicas\": 0}";
SharedSignificantTermsTestMethods.index01Docs(type, settings, this); SharedSignificantTermsTestMethods.index01Docs(type, settings, this);
SearchRequestBuilder request; SearchRequestBuilder request;
if ("text".equals(type) && randomBoolean() ) { if ("text".equals(type) && randomBoolean() ) {
// Use significant_text on text fields but occasionally run with alternative of // Use significant_text on text fields but occasionally run with alternative of
// significant_terms on legacy fieldData=true too. // significant_terms on legacy fieldData=true too.
request = client().prepareSearch(INDEX_NAME).setTypes(DOC_TYPE) request = client().prepareSearch(INDEX_NAME).setTypes(DOC_TYPE)
.addAggregation(terms("class").field(CLASS_FIELD) .addAggregation(terms("class").field(CLASS_FIELD)
.subAggregation(significantText("sig_terms", TEXT_FIELD))); .subAggregation(significantText("sig_terms", TEXT_FIELD)));
} else { } else {
request = client().prepareSearch(INDEX_NAME).setTypes(DOC_TYPE) request = client().prepareSearch(INDEX_NAME).setTypes(DOC_TYPE)
.addAggregation(terms("class").field(CLASS_FIELD) .addAggregation(terms("class").field(CLASS_FIELD)
.subAggregation(significantTerms("sig_terms").field(TEXT_FIELD))); .subAggregation(significantTerms("sig_terms").field(TEXT_FIELD)));
} }
SearchResponse response = request.execute().actionGet(); SearchResponse response = request.execute().actionGet();
assertSearchResponse(response); assertSearchResponse(response);
StringTerms classes = response.getAggregations().get("class"); StringTerms classes = response.getAggregations().get("class");
assertThat(classes.getBuckets().size(), equalTo(2)); assertThat(classes.getBuckets().size(), equalTo(2));
@ -368,8 +368,8 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase {
indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "1").setSource(TEXT_FIELD, text, CLASS_FIELD, "1")); indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "1").setSource(TEXT_FIELD, text, CLASS_FIELD, "1"));
} }
indexRandom(true, false, indexRequestBuilderList); indexRandom(true, false, indexRequestBuilderList);
SearchRequestBuilder request; SearchRequestBuilder request;
if (randomBoolean() ) { if (randomBoolean() ) {
request = client().prepareSearch(INDEX_NAME).setTypes(DOC_TYPE) request = client().prepareSearch(INDEX_NAME).setTypes(DOC_TYPE)
@ -379,7 +379,7 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase {
.subAggregation( .subAggregation(
significantTerms("sig_terms") significantTerms("sig_terms")
.field(TEXT_FIELD) .field(TEXT_FIELD)
.minDocCount(1))); .minDocCount(1)));
}else }else
{ {
request = client().prepareSearch(INDEX_NAME).setTypes(DOC_TYPE) request = client().prepareSearch(INDEX_NAME).setTypes(DOC_TYPE)
@ -388,9 +388,9 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase {
.field(CLASS_FIELD) .field(CLASS_FIELD)
.subAggregation( .subAggregation(
significantText("sig_terms", TEXT_FIELD) significantText("sig_terms", TEXT_FIELD)
.minDocCount(1))); .minDocCount(1)));
} }
request.execute().actionGet(); request.execute().actionGet();
} }
@ -409,7 +409,7 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase {
// 2. filter buckets and set the background to the other class and set is_background false // 2. filter buckets and set the background to the other class and set is_background false
// both should yield exact same result // both should yield exact same result
public void testBackgroundVsSeparateSet(SignificanceHeuristic significanceHeuristicExpectingSuperset, public void testBackgroundVsSeparateSet(SignificanceHeuristic significanceHeuristicExpectingSuperset,
SignificanceHeuristic significanceHeuristicExpectingSeparateSets, SignificanceHeuristic significanceHeuristicExpectingSeparateSets,
String type) throws Exception { String type) throws Exception {
final boolean useSigText = randomBoolean() && type.equals("text"); final boolean useSigText = randomBoolean() && type.equals("text");
@ -422,7 +422,7 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase {
significantText("sig_terms", TEXT_FIELD) significantText("sig_terms", TEXT_FIELD)
.minDocCount(1) .minDocCount(1)
.significanceHeuristic( .significanceHeuristic(
significanceHeuristicExpectingSuperset))); significanceHeuristicExpectingSuperset)));
}else }else
{ {
request1 = client().prepareSearch(INDEX_NAME).setTypes(DOC_TYPE) request1 = client().prepareSearch(INDEX_NAME).setTypes(DOC_TYPE)
@ -433,12 +433,12 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase {
.field(TEXT_FIELD) .field(TEXT_FIELD)
.minDocCount(1) .minDocCount(1)
.significanceHeuristic( .significanceHeuristic(
significanceHeuristicExpectingSuperset))); significanceHeuristicExpectingSuperset)));
} }
SearchResponse response1 = request1.execute().actionGet(); SearchResponse response1 = request1.execute().actionGet();
assertSearchResponse(response1); assertSearchResponse(response1);
SearchRequestBuilder request2; SearchRequestBuilder request2;
if (useSigText) { if (useSigText) {
request2 = client().prepareSearch(INDEX_NAME).setTypes(DOC_TYPE) request2 = client().prepareSearch(INDEX_NAME).setTypes(DOC_TYPE)
@ -451,7 +451,7 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase {
.subAggregation(significantText("sig_terms", TEXT_FIELD) .subAggregation(significantText("sig_terms", TEXT_FIELD)
.minDocCount(1) .minDocCount(1)
.backgroundFilter(QueryBuilders.termQuery(CLASS_FIELD, "0")) .backgroundFilter(QueryBuilders.termQuery(CLASS_FIELD, "0"))
.significanceHeuristic(significanceHeuristicExpectingSeparateSets))); .significanceHeuristic(significanceHeuristicExpectingSeparateSets)));
}else }else
{ {
request2 = client().prepareSearch(INDEX_NAME).setTypes(DOC_TYPE) request2 = client().prepareSearch(INDEX_NAME).setTypes(DOC_TYPE)
@ -466,9 +466,9 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase {
.field(TEXT_FIELD) .field(TEXT_FIELD)
.minDocCount(1) .minDocCount(1)
.backgroundFilter(QueryBuilders.termQuery(CLASS_FIELD, "0")) .backgroundFilter(QueryBuilders.termQuery(CLASS_FIELD, "0"))
.significanceHeuristic(significanceHeuristicExpectingSeparateSets))); .significanceHeuristic(significanceHeuristicExpectingSeparateSets)));
} }
SearchResponse response2 = request2.execute().actionGet(); SearchResponse response2 = request2.execute().actionGet();
StringTerms classes = response1.getAggregations().get("class"); StringTerms classes = response1.getAggregations().get("class");
@ -513,17 +513,17 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase {
.field("text") .field("text")
.executionHint(randomExecutionHint()) .executionHint(randomExecutionHint())
.significanceHeuristic(heuristic) .significanceHeuristic(heuristic)
.minDocCount(1).shardSize(1000).size(1000))); .minDocCount(1).shardSize(1000).size(1000)));
}else }else
{ {
request = client().prepareSearch("test") request = client().prepareSearch("test")
.addAggregation(terms("class").field("class").subAggregation(significantText("mySignificantTerms", "text") .addAggregation(terms("class").field("class").subAggregation(significantText("mySignificantTerms", "text")
.significanceHeuristic(heuristic) .significanceHeuristic(heuristic)
.minDocCount(1).shardSize(1000).size(1000))); .minDocCount(1).shardSize(1000).size(1000)));
} }
SearchResponse response = request.execute().actionGet(); SearchResponse response = request.execute().actionGet();
assertSearchResponse(response); assertSearchResponse(response);
assertSearchResponse(response); assertSearchResponse(response);
StringTerms classes = response.getAggregations().get("class"); StringTerms classes = response.getAggregations().get("class");
assertThat(classes.getBuckets().size(), equalTo(2)); assertThat(classes.getBuckets().size(), equalTo(2));
@ -580,14 +580,14 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase {
String type = randomBoolean() ? "text" : "long"; String type = randomBoolean() ? "text" : "long";
indexRandomFrequencies01(type); indexRandomFrequencies01(type);
ScriptHeuristic scriptHeuristic = getScriptSignificanceHeuristic(); ScriptHeuristic scriptHeuristic = getScriptSignificanceHeuristic();
SearchRequestBuilder request; SearchRequestBuilder request;
if ("text".equals(type) && randomBoolean()) { if ("text".equals(type) && randomBoolean()) {
request = client().prepareSearch(INDEX_NAME) request = client().prepareSearch(INDEX_NAME)
.addAggregation(terms("class").field(CLASS_FIELD) .addAggregation(terms("class").field(CLASS_FIELD)
.subAggregation(significantText("mySignificantTerms", TEXT_FIELD) .subAggregation(significantText("mySignificantTerms", TEXT_FIELD)
.significanceHeuristic(scriptHeuristic) .significanceHeuristic(scriptHeuristic)
.minDocCount(1).shardSize(2).size(2))); .minDocCount(1).shardSize(2).size(2)));
}else }else
{ {
request = client().prepareSearch(INDEX_NAME) request = client().prepareSearch(INDEX_NAME)
@ -596,10 +596,10 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase {
.field(TEXT_FIELD) .field(TEXT_FIELD)
.executionHint(randomExecutionHint()) .executionHint(randomExecutionHint())
.significanceHeuristic(scriptHeuristic) .significanceHeuristic(scriptHeuristic)
.minDocCount(1).shardSize(2).size(2))); .minDocCount(1).shardSize(2).size(2)));
} }
SearchResponse response = request.execute().actionGet(); SearchResponse response = request.execute().actionGet();
assertSearchResponse(response); assertSearchResponse(response);
for (Terms.Bucket classBucket : ((Terms) response.getAggregations().get("class")).getBuckets()) { for (Terms.Bucket classBucket : ((Terms) response.getAggregations().get("class")).getBuckets()) {
SignificantTerms sigTerms = classBucket.getAggregations().get("mySignificantTerms"); SignificantTerms sigTerms = classBucket.getAggregations().get("mySignificantTerms");
for (SignificantTerms.Bucket bucket : sigTerms.getBuckets()) { for (SignificantTerms.Bucket bucket : sigTerms.getBuckets()) {
@ -666,8 +666,8 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase {
// Test that a request using a script does not get cached // Test that a request using a script does not get cached
ScriptHeuristic scriptHeuristic = getScriptSignificanceHeuristic(); ScriptHeuristic scriptHeuristic = getScriptSignificanceHeuristic();
boolean useSigText = randomBoolean(); boolean useSigText = randomBoolean();
SearchResponse r; SearchResponse r;
if (useSigText) { if (useSigText) {
r = client().prepareSearch("cache_test_idx").setSize(0) r = client().prepareSearch("cache_test_idx").setSize(0)
.addAggregation(significantText("foo", "s").significanceHeuristic(scriptHeuristic)).get(); .addAggregation(significantText("foo", "s").significanceHeuristic(scriptHeuristic)).get();
@ -696,7 +696,7 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase {
assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache()
.getMissCount(), equalTo(1L)); .getMissCount(), equalTo(1L));
} }
} }

View File

@ -21,7 +21,6 @@ package org.elasticsearch.search.aggregations.bucket.geogrid;
import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase;
import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.instanceOf;
@ -29,33 +28,30 @@ import static org.hamcrest.Matchers.instanceOf;
public class GeoHashGridParserTests extends ESTestCase { public class GeoHashGridParserTests extends ESTestCase {
public void testParseValidFromInts() throws Exception { public void testParseValidFromInts() throws Exception {
int precision = randomIntBetween(1, 12); int precision = randomIntBetween(1, 12);
XContentParser stParser = createParser(JsonXContent.jsonXContent, XContentParser stParser = createParser(JsonXContent.jsonXContent,
"{\"field\":\"my_loc\", \"precision\":" + precision + ", \"size\": 500, \"shard_size\": 550}"); "{\"field\":\"my_loc\", \"precision\":" + precision + ", \"size\": 500, \"shard_size\": 550}");
QueryParseContext parseContext = new QueryParseContext(stParser);
XContentParser.Token token = stParser.nextToken(); XContentParser.Token token = stParser.nextToken();
assertSame(XContentParser.Token.START_OBJECT, token); assertSame(XContentParser.Token.START_OBJECT, token);
// can create a factory // can create a factory
assertNotNull(GeoGridAggregationBuilder.parse("geohash_grid", parseContext)); assertNotNull(GeoGridAggregationBuilder.parse("geohash_grid", stParser));
} }
public void testParseValidFromStrings() throws Exception { public void testParseValidFromStrings() throws Exception {
int precision = randomIntBetween(1, 12); int precision = randomIntBetween(1, 12);
XContentParser stParser = createParser(JsonXContent.jsonXContent, XContentParser stParser = createParser(JsonXContent.jsonXContent,
"{\"field\":\"my_loc\", \"precision\":\"" + precision + "\", \"size\": \"500\", \"shard_size\": \"550\"}"); "{\"field\":\"my_loc\", \"precision\":\"" + precision + "\", \"size\": \"500\", \"shard_size\": \"550\"}");
QueryParseContext parseContext = new QueryParseContext(stParser);
XContentParser.Token token = stParser.nextToken(); XContentParser.Token token = stParser.nextToken();
assertSame(XContentParser.Token.START_OBJECT, token); assertSame(XContentParser.Token.START_OBJECT, token);
// can create a factory // can create a factory
assertNotNull(GeoGridAggregationBuilder.parse("geohash_grid", parseContext)); assertNotNull(GeoGridAggregationBuilder.parse("geohash_grid", stParser));
} }
public void testParseErrorOnNonIntPrecision() throws Exception { public void testParseErrorOnNonIntPrecision() throws Exception {
XContentParser stParser = createParser(JsonXContent.jsonXContent, "{\"field\":\"my_loc\", \"precision\":\"2.0\"}"); XContentParser stParser = createParser(JsonXContent.jsonXContent, "{\"field\":\"my_loc\", \"precision\":\"2.0\"}");
QueryParseContext parseContext = new QueryParseContext(stParser);
XContentParser.Token token = stParser.nextToken(); XContentParser.Token token = stParser.nextToken();
assertSame(XContentParser.Token.START_OBJECT, token); assertSame(XContentParser.Token.START_OBJECT, token);
try { try {
GeoGridAggregationBuilder.parse("geohash_grid", parseContext); GeoGridAggregationBuilder.parse("geohash_grid", stParser);
fail(); fail();
} catch (ParsingException ex) { } catch (ParsingException ex) {
assertThat(ex.getCause(), instanceOf(NumberFormatException.class)); assertThat(ex.getCause(), instanceOf(NumberFormatException.class));
@ -65,11 +61,10 @@ public class GeoHashGridParserTests extends ESTestCase {
public void testParseErrorOnBooleanPrecision() throws Exception { public void testParseErrorOnBooleanPrecision() throws Exception {
XContentParser stParser = createParser(JsonXContent.jsonXContent, "{\"field\":\"my_loc\", \"precision\":false}"); XContentParser stParser = createParser(JsonXContent.jsonXContent, "{\"field\":\"my_loc\", \"precision\":false}");
QueryParseContext parseContext = new QueryParseContext(stParser);
XContentParser.Token token = stParser.nextToken(); XContentParser.Token token = stParser.nextToken();
assertSame(XContentParser.Token.START_OBJECT, token); assertSame(XContentParser.Token.START_OBJECT, token);
try { try {
GeoGridAggregationBuilder.parse("geohash_grid", parseContext); GeoGridAggregationBuilder.parse("geohash_grid", stParser);
fail(); fail();
} catch (IllegalArgumentException ex) { } catch (IllegalArgumentException ex) {
assertEquals("[geohash_grid] precision doesn't support values of type: VALUE_BOOLEAN", ex.getMessage()); assertEquals("[geohash_grid] precision doesn't support values of type: VALUE_BOOLEAN", ex.getMessage());
@ -78,11 +73,10 @@ public class GeoHashGridParserTests extends ESTestCase {
public void testParseErrorOnPrecisionOutOfRange() throws Exception { public void testParseErrorOnPrecisionOutOfRange() throws Exception {
XContentParser stParser = createParser(JsonXContent.jsonXContent, "{\"field\":\"my_loc\", \"precision\":\"13\"}"); XContentParser stParser = createParser(JsonXContent.jsonXContent, "{\"field\":\"my_loc\", \"precision\":\"13\"}");
QueryParseContext parseContext = new QueryParseContext(stParser);
XContentParser.Token token = stParser.nextToken(); XContentParser.Token token = stParser.nextToken();
assertSame(XContentParser.Token.START_OBJECT, token); assertSame(XContentParser.Token.START_OBJECT, token);
try { try {
GeoGridAggregationBuilder.parse("geohash_grid", parseContext); GeoGridAggregationBuilder.parse("geohash_grid", stParser);
fail(); fail();
} catch (ParsingException ex) { } catch (ParsingException ex) {
assertThat(ex.getCause(), instanceOf(IllegalArgumentException.class)); assertThat(ex.getCause(), instanceOf(IllegalArgumentException.class));

View File

@ -33,7 +33,6 @@ import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.Index; import org.elasticsearch.index.Index;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.SearchModule; import org.elasticsearch.search.SearchModule;
import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.SearchShardTarget;
@ -267,9 +266,8 @@ public class SignificanceHeuristicTests extends ESTestCase {
try { try {
XContentParser stParser = createParser(JsonXContent.jsonXContent, XContentParser stParser = createParser(JsonXContent.jsonXContent,
"{\"field\":\"text\", " + faultyHeuristicDefinition + ",\"min_doc_count\":200}"); "{\"field\":\"text\", " + faultyHeuristicDefinition + ",\"min_doc_count\":200}");
QueryParseContext parseContext = new QueryParseContext(stParser);
stParser.nextToken(); stParser.nextToken();
SignificantTermsAggregationBuilder.getParser(significanceHeuristicParserRegistry).parse("testagg", parseContext); SignificantTermsAggregationBuilder.getParser(significanceHeuristicParserRegistry).parse("testagg", stParser);
fail(); fail();
} catch (ParsingException e) { } catch (ParsingException e) {
assertThat(e.getCause().getMessage(), containsString(expectedError)); assertThat(e.getCause().getMessage(), containsString(expectedError));
@ -286,14 +284,13 @@ public class SignificanceHeuristicTests extends ESTestCase {
return parseSignificanceHeuristic(significanceHeuristicParserRegistry, stParser); return parseSignificanceHeuristic(significanceHeuristicParserRegistry, stParser);
} }
private SignificanceHeuristic parseSignificanceHeuristic( private static SignificanceHeuristic parseSignificanceHeuristic(
ParseFieldRegistry<SignificanceHeuristicParser> significanceHeuristicParserRegistry, ParseFieldRegistry<SignificanceHeuristicParser> significanceHeuristicParserRegistry,
XContentParser stParser) throws IOException { XContentParser stParser) throws IOException {
QueryParseContext parseContext = new QueryParseContext(stParser);
stParser.nextToken(); stParser.nextToken();
SignificantTermsAggregationBuilder aggregatorFactory = SignificantTermsAggregationBuilder aggregatorFactory =
(SignificantTermsAggregationBuilder) SignificantTermsAggregationBuilder.getParser( (SignificantTermsAggregationBuilder) SignificantTermsAggregationBuilder.getParser(
significanceHeuristicParserRegistry).parse("testagg", parseContext); significanceHeuristicParserRegistry).parse("testagg", stParser);
stParser.nextToken(); stParser.nextToken();
assertThat(aggregatorFactory.getBucketCountThresholds().getMinDocCount(), equalTo(200L)); assertThat(aggregatorFactory.getBucketCountThresholds().getMinDocCount(), equalTo(200L));
assertThat(stParser.currentToken(), equalTo(null)); assertThat(stParser.currentToken(), equalTo(null));
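The same pattern applies when a significance heuristic is part of the definition; a minimal sketch, assuming the significanceHeuristicParserRegistry set up elsewhere in this test class and a built-in heuristic name — the JSON body is illustrative:

    // Sketch: parse a significant_terms definition, heuristic included,
    // feeding the registry-backed parser an XContentParser directly.
    XContentParser stParser = createParser(JsonXContent.jsonXContent,
            "{\"field\":\"text\", \"jlh\":{}, \"min_doc_count\":200}");
    stParser.nextToken();
    SignificantTermsAggregationBuilder agg = (SignificantTermsAggregationBuilder)
            SignificantTermsAggregationBuilder.getParser(significanceHeuristicParserRegistry)
                    .parse("testagg", stParser);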

View File

@ -26,7 +26,6 @@ import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.MatchNoneQueryBuilder; import org.elasticsearch.index.query.MatchNoneQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.search.aggregations.BaseAggregationTestCase; import org.elasticsearch.search.aggregations.BaseAggregationTestCase;
import org.elasticsearch.search.aggregations.bucket.filters.FiltersAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.filters.FiltersAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.filters.FiltersAggregator.KeyedFilter; import org.elasticsearch.search.aggregations.bucket.filters.FiltersAggregator.KeyedFilter;
@ -87,8 +86,7 @@ public class FiltersTests extends BaseAggregationTestCase<FiltersAggregationBuil
builder.endObject(); builder.endObject();
XContentParser parser = createParser(shuffleXContent(builder)); XContentParser parser = createParser(shuffleXContent(builder));
parser.nextToken(); parser.nextToken();
QueryParseContext context = new QueryParseContext(parser); FiltersAggregationBuilder filters = FiltersAggregationBuilder.parse("agg_name", parser);
FiltersAggregationBuilder filters = FiltersAggregationBuilder.parse("agg_name", context);
// The other bucket is disabled by default // The other bucket is disabled by default
assertFalse(filters.otherBucket()); assertFalse(filters.otherBucket());
@ -99,8 +97,7 @@ public class FiltersTests extends BaseAggregationTestCase<FiltersAggregationBuil
builder.endObject(); builder.endObject();
parser = createParser(shuffleXContent(builder)); parser = createParser(shuffleXContent(builder));
parser.nextToken(); parser.nextToken();
context = new QueryParseContext(parser); filters = FiltersAggregationBuilder.parse("agg_name", parser);
filters = FiltersAggregationBuilder.parse("agg_name", context);
// but setting a key enables it automatically // but setting a key enables it automatically
assertTrue(filters.otherBucket()); assertTrue(filters.otherBucket());
@ -112,8 +109,7 @@ public class FiltersTests extends BaseAggregationTestCase<FiltersAggregationBuil
builder.endObject(); builder.endObject();
parser = createParser(shuffleXContent(builder)); parser = createParser(shuffleXContent(builder));
parser.nextToken(); parser.nextToken();
context = new QueryParseContext(parser); filters = FiltersAggregationBuilder.parse("agg_name", parser);
filters = FiltersAggregationBuilder.parse("agg_name", context);
// unless the other bucket is explicitly disabled // unless the other bucket is explicitly disabled
assertFalse(filters.otherBucket()); assertFalse(filters.otherBucket());
} }
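Likewise for the filters aggregation: the builder is now parsed from the parser alone. A minimal sketch with a hand-written keyed-filters body (the JSON shape is an assumption for illustration; the test above builds its source with XContentBuilder instead):

    // Sketch: parse a filters aggregation body directly from an XContentParser.
    XContentParser parser = createParser(JsonXContent.jsonXContent,
            "{\"filters\": {\"errors\": {\"term\": {\"status\": \"error\"}}}}");
    parser.nextToken();
    FiltersAggregationBuilder filters = FiltersAggregationBuilder.parse("agg_name", parser);
    assertFalse(filters.otherBucket()); // the other bucket stays disabled by default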

View File

@ -21,8 +21,6 @@ package org.elasticsearch.search.aggregations.metrics;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.script.Script;
import org.elasticsearch.search.aggregations.AggregationInitializationException; import org.elasticsearch.search.aggregations.AggregationInitializationException;
import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.BaseAggregationTestCase; import org.elasticsearch.search.aggregations.BaseAggregationTestCase;
@ -189,9 +187,8 @@ public class TopHitsTests extends BaseAggregationTestCase<TopHitsAggregationBuil
" }\n" + " }\n" +
"}"; "}";
XContentParser parser = createParser(JsonXContent.jsonXContent, source); XContentParser parser = createParser(JsonXContent.jsonXContent, source);
QueryParseContext parseContext = new QueryParseContext(parser);
assertSame(XContentParser.Token.START_OBJECT, parser.nextToken()); assertSame(XContentParser.Token.START_OBJECT, parser.nextToken());
Exception e = expectThrows(AggregationInitializationException.class, () -> AggregatorFactories.parseAggregators(parseContext)); Exception e = expectThrows(AggregationInitializationException.class, () -> AggregatorFactories.parseAggregators(parser));
assertThat(e.toString(), containsString("Aggregator [top_tags_hits] of type [top_hits] cannot accept sub-aggregations")); assertThat(e.toString(), containsString("Aggregator [top_tags_hits] of type [top_hits] cannot accept sub-aggregations"));
} }

Some files were not shown because too many files have changed in this diff.