Remove parser argument from methods where we already pass in a parse context

When we pass down both parser and QueryParseContext to a method, we cannot
make sure that the parser contained in the context and the parser that is
passed as an argument have the same state. This removes the parser argument
from methods where we currently have both the parser and the parse context
as arguments and instead retrieves the parser from the context inside the
method.
This commit is contained in:
Christoph Büscher 2016-04-13 22:29:57 +02:00
parent ed3a71fa2b
commit e15e7f7e6e
53 changed files with 314 additions and 159 deletions

View File

@ -219,7 +219,7 @@ public class TransportPercolateAction extends HandledTransportAction<PercolateRe
try (XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(source)) { try (XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(source)) {
context.reset(parser); context.reset(parser);
context.parseFieldMatcher(parseFieldMatcher); context.parseFieldMatcher(parseFieldMatcher);
searchSourceBuilder.parseXContent(parser, context, aggParsers, null); searchSourceBuilder.parseXContent(context, aggParsers, null);
searchRequest.source(searchSourceBuilder); searchRequest.source(searchSourceBuilder);
return searchRequest; return searchRequest;
} }

View File

@ -250,7 +250,7 @@ public class HasChildQueryBuilder extends AbstractQueryBuilder<HasChildQueryBuil
if (parseContext.parseFieldMatcher().match(currentFieldName, QUERY_FIELD)) { if (parseContext.parseFieldMatcher().match(currentFieldName, QUERY_FIELD)) {
iqb = parseContext.parseInnerQueryBuilder(); iqb = parseContext.parseInnerQueryBuilder();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, INNER_HITS_FIELD)) { } else if (parseContext.parseFieldMatcher().match(currentFieldName, INNER_HITS_FIELD)) {
innerHitBuilder = InnerHitBuilder.fromXContent(parser, parseContext); innerHitBuilder = InnerHitBuilder.fromXContent(parseContext);
} else { } else {
throw new ParsingException(parser.getTokenLocation(), "[has_child] query does not support [" + currentFieldName + "]"); throw new ParsingException(parser.getTokenLocation(), "[has_child] query does not support [" + currentFieldName + "]");
} }

View File

@ -252,7 +252,7 @@ public class HasParentQueryBuilder extends AbstractQueryBuilder<HasParentQueryBu
if (parseContext.parseFieldMatcher().match(currentFieldName, QUERY_FIELD)) { if (parseContext.parseFieldMatcher().match(currentFieldName, QUERY_FIELD)) {
iqb = parseContext.parseInnerQueryBuilder(); iqb = parseContext.parseInnerQueryBuilder();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, INNER_HITS_FIELD)) { } else if (parseContext.parseFieldMatcher().match(currentFieldName, INNER_HITS_FIELD)) {
innerHits = InnerHitBuilder.fromXContent(parser, parseContext); innerHits = InnerHitBuilder.fromXContent(parseContext);
} else { } else {
throw new ParsingException(parser.getTokenLocation(), "[has_parent] query does not support [" + currentFieldName + "]"); throw new ParsingException(parser.getTokenLocation(), "[has_parent] query does not support [" + currentFieldName + "]");
} }

View File

@ -163,7 +163,7 @@ public class NestedQueryBuilder extends AbstractQueryBuilder<NestedQueryBuilder>
float boost = AbstractQueryBuilder.DEFAULT_BOOST; float boost = AbstractQueryBuilder.DEFAULT_BOOST;
ScoreMode scoreMode = ScoreMode.Avg; ScoreMode scoreMode = ScoreMode.Avg;
String queryName = null; String queryName = null;
QueryBuilder query = null; QueryBuilder<?> query = null;
String path = null; String path = null;
String currentFieldName = null; String currentFieldName = null;
InnerHitBuilder innerHitBuilder = null; InnerHitBuilder innerHitBuilder = null;
@ -176,7 +176,7 @@ public class NestedQueryBuilder extends AbstractQueryBuilder<NestedQueryBuilder>
if (parseContext.parseFieldMatcher().match(currentFieldName, QUERY_FIELD)) { if (parseContext.parseFieldMatcher().match(currentFieldName, QUERY_FIELD)) {
query = parseContext.parseInnerQueryBuilder(); query = parseContext.parseInnerQueryBuilder();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, INNER_HITS_FIELD)) { } else if (parseContext.parseFieldMatcher().match(currentFieldName, INNER_HITS_FIELD)) {
innerHitBuilder = InnerHitBuilder.fromXContent(parser, parseContext); innerHitBuilder = InnerHitBuilder.fromXContent(parseContext);
} else { } else {
throw new ParsingException(parser.getTokenLocation(), "[nested] query does not support [" + currentFieldName + "]"); throw new ParsingException(parser.getTokenLocation(), "[nested] query does not support [" + currentFieldName + "]");
} }

View File

@ -97,7 +97,8 @@ public final class DecayFunctionParser<DFB extends DecayFunctionBuilder<DFB>> im
* </pre> * </pre>
*/ */
@Override @Override
public DFB fromXContent(QueryParseContext context, XContentParser parser) throws IOException, ParsingException { public DFB fromXContent(QueryParseContext context) throws IOException, ParsingException {
XContentParser parser = context.parser();
String currentFieldName; String currentFieldName;
XContentParser.Token token; XContentParser.Token token;
MultiValueMode multiValueMode = DecayFunctionBuilder.DEFAULT_MULTI_VALUE_MODE; MultiValueMode multiValueMode = DecayFunctionBuilder.DEFAULT_MULTI_VALUE_MODE;

View File

@ -159,8 +159,9 @@ public class FieldValueFactorFunctionBuilder extends ScoreFunctionBuilder<FieldV
return new FieldValueFactorFunction(field, factor, modifier, missing, fieldData); return new FieldValueFactorFunction(field, factor, modifier, missing, fieldData);
} }
public static FieldValueFactorFunctionBuilder fromXContent(QueryParseContext parseContext, XContentParser parser) public static FieldValueFactorFunctionBuilder fromXContent(QueryParseContext parseContext)
throws IOException, ParsingException { throws IOException, ParsingException {
XContentParser parser = parseContext.parser();
String currentFieldName = null; String currentFieldName = null;
String field = null; String field = null;
float boostFactor = FieldValueFactorFunctionBuilder.DEFAULT_FACTOR; float boostFactor = FieldValueFactorFunctionBuilder.DEFAULT_FACTOR;

View File

@ -477,7 +477,7 @@ public class FunctionScoreQueryBuilder extends AbstractQueryBuilder<FunctionScor
// we try to parse a score function. If there is no score function for the current field name, // we try to parse a score function. If there is no score function for the current field name,
// getScoreFunction will throw. // getScoreFunction will throw.
ScoreFunctionBuilder<?> scoreFunction = scoreFunctionsRegistry.lookup(currentFieldName, parseContext.parser()) ScoreFunctionBuilder<?> scoreFunction = scoreFunctionsRegistry.lookup(currentFieldName, parseContext.parser())
.fromXContent(parseContext, parser); .fromXContent(parseContext);
filterFunctionBuilders.add(new FunctionScoreQueryBuilder.FilterFunctionBuilder(scoreFunction)); filterFunctionBuilders.add(new FunctionScoreQueryBuilder.FilterFunctionBuilder(scoreFunction));
} }
} else if (token == XContentParser.Token.START_ARRAY) { } else if (token == XContentParser.Token.START_ARRAY) {
@ -487,7 +487,7 @@ public class FunctionScoreQueryBuilder extends AbstractQueryBuilder<FunctionScor
handleMisplacedFunctionsDeclaration(parser.getTokenLocation(), errorString); handleMisplacedFunctionsDeclaration(parser.getTokenLocation(), errorString);
} }
functionArrayFound = true; functionArrayFound = true;
currentFieldName = parseFiltersAndFunctions(scoreFunctionsRegistry, parseContext, parser, filterFunctionBuilders); currentFieldName = parseFiltersAndFunctions(scoreFunctionsRegistry, parseContext, filterFunctionBuilders);
} else { } else {
throw new ParsingException(parser.getTokenLocation(), "failed to parse [{}] query. array [{}] is not supported", throw new ParsingException(parser.getTokenLocation(), "failed to parse [{}] query. array [{}] is not supported",
NAME, currentFieldName); NAME, currentFieldName);
@ -555,10 +555,11 @@ public class FunctionScoreQueryBuilder extends AbstractQueryBuilder<FunctionScor
} }
private static String parseFiltersAndFunctions(ParseFieldRegistry<ScoreFunctionParser<?>> scoreFunctionsRegistry, private static String parseFiltersAndFunctions(ParseFieldRegistry<ScoreFunctionParser<?>> scoreFunctionsRegistry,
QueryParseContext parseContext, XContentParser parser, QueryParseContext parseContext, List<FunctionScoreQueryBuilder.FilterFunctionBuilder> filterFunctionBuilders)
List<FunctionScoreQueryBuilder.FilterFunctionBuilder> filterFunctionBuilders) throws IOException { throws IOException {
String currentFieldName = null; String currentFieldName = null;
XContentParser.Token token; XContentParser.Token token;
XContentParser parser = parseContext.parser();
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
QueryBuilder<?> filter = null; QueryBuilder<?> filter = null;
ScoreFunctionBuilder<?> scoreFunction = null; ScoreFunctionBuilder<?> scoreFunction = null;
@ -580,7 +581,7 @@ public class FunctionScoreQueryBuilder extends AbstractQueryBuilder<FunctionScor
"failed to parse function_score functions. already found [{}], now encountering [{}].", "failed to parse function_score functions. already found [{}], now encountering [{}].",
scoreFunction.getName(), currentFieldName); scoreFunction.getName(), currentFieldName);
} }
scoreFunction = scoreFunctionsRegistry.lookup(currentFieldName, parser).fromXContent(parseContext, parser); scoreFunction = scoreFunctionsRegistry.lookup(currentFieldName, parser).fromXContent(parseContext);
} }
} else if (token.isValue()) { } else if (token.isValue()) {
if (parseContext.parseFieldMatcher().match(currentFieldName, WEIGHT_FIELD)) { if (parseContext.parseFieldMatcher().match(currentFieldName, WEIGHT_FIELD)) {

View File

@ -143,8 +143,9 @@ public class RandomScoreFunctionBuilder extends ScoreFunctionBuilder<RandomScore
return Long.hashCode(value); return Long.hashCode(value);
} }
public static RandomScoreFunctionBuilder fromXContent(QueryParseContext parseContext, XContentParser parser) public static RandomScoreFunctionBuilder fromXContent(QueryParseContext parseContext)
throws IOException, ParsingException { throws IOException, ParsingException {
XContentParser parser = parseContext.parser();
RandomScoreFunctionBuilder randomScoreFunctionBuilder = new RandomScoreFunctionBuilder(); RandomScoreFunctionBuilder randomScoreFunctionBuilder = new RandomScoreFunctionBuilder();
String currentFieldName = null; String currentFieldName = null;
XContentParser.Token token; XContentParser.Token token;

View File

@ -20,7 +20,6 @@
package org.elasticsearch.index.query.functionscore; package org.elasticsearch.index.query.functionscore;
import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.query.QueryParseContext;
import java.io.IOException; import java.io.IOException;
@ -30,5 +29,5 @@ import java.io.IOException;
*/ */
@FunctionalInterface @FunctionalInterface
public interface ScoreFunctionParser<FB extends ScoreFunctionBuilder<FB>> { public interface ScoreFunctionParser<FB extends ScoreFunctionBuilder<FB>> {
FB fromXContent(QueryParseContext context, XContentParser parser) throws IOException, ParsingException; FB fromXContent(QueryParseContext context) throws IOException, ParsingException;
} }

View File

@ -32,9 +32,9 @@ import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.QueryShardException; import org.elasticsearch.index.query.QueryShardException;
import org.elasticsearch.script.Script; import org.elasticsearch.script.Script;
import org.elasticsearch.script.Script.ScriptField; import org.elasticsearch.script.Script.ScriptField;
import org.elasticsearch.script.ScriptParameterParser.ScriptParameterValue;
import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.ScriptParameterParser; import org.elasticsearch.script.ScriptParameterParser;
import org.elasticsearch.script.ScriptParameterParser.ScriptParameterValue;
import org.elasticsearch.script.SearchScript; import org.elasticsearch.script.SearchScript;
import java.io.IOException; import java.io.IOException;
@ -110,8 +110,9 @@ public class ScriptScoreFunctionBuilder extends ScoreFunctionBuilder<ScriptScore
} }
} }
public static ScriptScoreFunctionBuilder fromXContent(QueryParseContext parseContext, XContentParser parser) public static ScriptScoreFunctionBuilder fromXContent(QueryParseContext parseContext)
throws IOException, ParsingException { throws IOException, ParsingException {
XContentParser parser = parseContext.parser();
ScriptParameterParser scriptParameterParser = new ScriptParameterParser(); ScriptParameterParser scriptParameterParser = new ScriptParameterParser();
Script script = null; Script script = null;
Map<String, Object> vars = null; Map<String, Object> vars = null;

View File

@ -82,7 +82,7 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeabl
try { try {
List<ScriptField> scriptFields = new ArrayList<>(); List<ScriptField> scriptFields = new ArrayList<>();
for (XContentParser.Token token = p.nextToken(); token != END_OBJECT; token = p.nextToken()) { for (XContentParser.Token token = p.nextToken(); token != END_OBJECT; token = p.nextToken()) {
scriptFields.add(new ScriptField(p, c)); scriptFields.add(new ScriptField(c));
} }
i.setScriptFields(scriptFields); i.setScriptFields(scriptFields);
} catch (IOException e) { } catch (IOException e) {
@ -93,7 +93,7 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeabl
ObjectParser.ValueType.OBJECT_ARRAY); ObjectParser.ValueType.OBJECT_ARRAY);
PARSER.declareField((p, i, c) -> { PARSER.declareField((p, i, c) -> {
try { try {
i.setFetchSourceContext(FetchSourceContext.parse(p, c)); i.setFetchSourceContext(FetchSourceContext.parse(c));
} catch (IOException e) { } catch (IOException e) {
throw new ParsingException(p.getTokenLocation(), "Could not parse inner _source definition", e); throw new ParsingException(p.getTokenLocation(), "Could not parse inner _source definition", e);
} }
@ -109,7 +109,7 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeabl
}, SearchSourceBuilder.QUERY_FIELD); }, SearchSourceBuilder.QUERY_FIELD);
PARSER.declareObject(InnerHitBuilder::setInnerHitsBuilder, (p, c) -> { PARSER.declareObject(InnerHitBuilder::setInnerHitsBuilder, (p, c) -> {
try { try {
return InnerHitsBuilder.fromXContent(p, c); return InnerHitsBuilder.fromXContent(c);
} catch (IOException e) { } catch (IOException e) {
throw new ParsingException(p.getTokenLocation(), "Could not parse inner query definition", e); throw new ParsingException(p.getTokenLocation(), "Could not parse inner query definition", e);
} }
@ -579,8 +579,8 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeabl
fieldDataFields, scriptFields, fetchSourceContext, sorts, highlightBuilder, query, innerHitsBuilder); fieldDataFields, scriptFields, fetchSourceContext, sorts, highlightBuilder, query, innerHitsBuilder);
} }
public static InnerHitBuilder fromXContent(XContentParser parser, QueryParseContext context) throws IOException { public static InnerHitBuilder fromXContent(QueryParseContext context) throws IOException {
return PARSER.parse(parser, new InnerHitBuilder(), context); return PARSER.parse(context.parser(), new InnerHitBuilder(), context);
} }
} }

View File

@ -23,7 +23,6 @@ import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParser.Token; import org.elasticsearch.common.xcontent.XContentParser.Token;
@ -104,13 +103,14 @@ public final class InnerHitsBuilder extends ToXContentToBytes implements Writeab
return innerHitsBuilders.hashCode(); return innerHitsBuilders.hashCode();
} }
public static InnerHitsBuilder fromXContent(XContentParser parser, QueryParseContext context) throws IOException { public static InnerHitsBuilder fromXContent(QueryParseContext context) throws IOException {
Map<String, InnerHitBuilder> innerHitBuilders = new HashMap<>(); Map<String, InnerHitBuilder> innerHitBuilders = new HashMap<>();
String innerHitName = null; String innerHitName = null;
XContentParser parser = context.parser();
for (Token token = parser.nextToken(); token != Token.END_OBJECT; token = parser.nextToken()) { for (Token token = parser.nextToken(); token != Token.END_OBJECT; token = parser.nextToken()) {
switch (token) { switch (token) {
case START_OBJECT: case START_OBJECT:
InnerHitBuilder innerHitBuilder = InnerHitBuilder.fromXContent(parser, context); InnerHitBuilder innerHitBuilder = InnerHitBuilder.fromXContent(context);
innerHitBuilder.setName(innerHitName); innerHitBuilder.setName(innerHitName);
innerHitBuilders.put(innerHitName, innerHitBuilder); innerHitBuilders.put(innerHitName, innerHitBuilder);
break; break;

View File

@ -196,7 +196,7 @@ public class RestMultiSearchAction extends BaseRestHandler {
} else { } else {
try (XContentParser requestParser = XContentFactory.xContent(slice).createParser(slice)) { try (XContentParser requestParser = XContentFactory.xContent(slice).createParser(slice)) {
queryParseContext.reset(requestParser); queryParseContext.reset(requestParser);
searchRequest.source(SearchSourceBuilder.fromXContent(requestParser, queryParseContext, aggParsers, suggesters)); searchRequest.source(SearchSourceBuilder.fromXContent(queryParseContext, aggParsers, suggesters));
} }
} }
// move pointers // move pointers

View File

@ -130,7 +130,7 @@ public class RestSearchAction extends BaseRestHandler {
Template template = TemplateQueryBuilder.parse(parser, context.parseFieldMatcher(), "params", "template"); Template template = TemplateQueryBuilder.parse(parser, context.parseFieldMatcher(), "params", "template");
searchRequest.template(template); searchRequest.template(template);
} else { } else {
searchRequest.source().parseXContent(parser, context, aggParsers, suggesters); searchRequest.source().parseXContent(context, aggParsers, suggesters);
} }
} }
} }

View File

@ -20,6 +20,7 @@
package org.elasticsearch.search; package org.elasticsearch.search;
import com.carrotsearch.hppc.ObjectFloatHashMap; import com.carrotsearch.hppc.ObjectFloatHashMap;
import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.FieldDoc;
import org.apache.lucene.search.Sort; import org.apache.lucene.search.Sort;
import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TopDocs;
@ -571,7 +572,7 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
QueryParseContext queryParseContext = new QueryParseContext(indicesService.getIndicesQueryRegistry()); QueryParseContext queryParseContext = new QueryParseContext(indicesService.getIndicesQueryRegistry());
queryParseContext.reset(parser); queryParseContext.reset(parser);
queryParseContext.parseFieldMatcher(parseFieldMatcher); queryParseContext.parseFieldMatcher(parseFieldMatcher);
parseSource(context, SearchSourceBuilder.fromXContent(parser, queryParseContext, aggParsers, suggesters)); parseSource(context, SearchSourceBuilder.fromXContent(queryParseContext, aggParsers, suggesters));
} }
} }
parseSource(context, request.source()); parseSource(context, request.source());

View File

@ -65,7 +65,7 @@ public class AggregationParseElement implements SearchParseElement {
QueryParseContext parseContext = new QueryParseContext(queriesRegistry); QueryParseContext parseContext = new QueryParseContext(queriesRegistry);
parseContext.reset(parser); parseContext.reset(parser);
parseContext.parseFieldMatcher(context.parseFieldMatcher()); parseContext.parseFieldMatcher(context.parseFieldMatcher());
AggregatorFactories.Builder builders = aggregatorParsers.parseAggregators(parser, parseContext); AggregatorFactories.Builder builders = aggregatorParsers.parseAggregators(parseContext);
AggregationContext aggContext = new AggregationContext(context); AggregationContext aggContext = new AggregationContext(context);
AggregatorFactories factories = builders.build(aggContext, null); AggregatorFactories factories = builders.build(aggContext, null);
factories.validate(); factories.validate();

View File

@ -26,7 +26,6 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.search.aggregations.bucket.BucketsAggregator; import org.elasticsearch.search.aggregations.bucket.BucketsAggregator;
import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.aggregations.support.AggregationContext;
@ -60,12 +59,11 @@ public abstract class Aggregator extends BucketCollector implements Releasable {
* aggregation should be skipped (e.g. when trying to aggregate on unmapped fields). * aggregation should be skipped (e.g. when trying to aggregate on unmapped fields).
* *
* @param aggregationName The name of the aggregation * @param aggregationName The name of the aggregation
* @param parser The xcontent parser * @param context The parse context
* @param context The search context
* @return The resolved aggregator factory or {@code null} in case the aggregation should be skipped * @return The resolved aggregator factory or {@code null} in case the aggregation should be skipped
* @throws java.io.IOException When parsing fails * @throws java.io.IOException When parsing fails
*/ */
AggregatorBuilder<?> parse(String aggregationName, XContentParser parser, QueryParseContext context) throws IOException; AggregatorBuilder<?> parse(String aggregationName, QueryParseContext context) throws IOException;
/** /**
* @return an empty {@link AggregatorBuilder} instance for this parser * @return an empty {@link AggregatorBuilder} instance for this parser

View File

@ -71,15 +71,14 @@ public class AggregatorParsers {
/** /**
* Parses the aggregation request recursively generating aggregator factories in turn. * Parses the aggregation request recursively generating aggregator factories in turn.
* *
* @param parser The input xcontent that will be parsed.
* @param parseContext The parse context. * @param parseContext The parse context.
* *
* @return The parsed aggregator factories. * @return The parsed aggregator factories.
* *
* @throws IOException When parsing fails for unknown reasons. * @throws IOException When parsing fails for unknown reasons.
*/ */
public AggregatorFactories.Builder parseAggregators(XContentParser parser, QueryParseContext parseContext) throws IOException { public AggregatorFactories.Builder parseAggregators(QueryParseContext parseContext) throws IOException {
return parseAggregators(parser, parseContext, 0); return parseAggregators(parseContext.parser(), parseContext, 0);
} }
private AggregatorFactories.Builder parseAggregators(XContentParser parser, QueryParseContext parseContext, int level) private AggregatorFactories.Builder parseAggregators(XContentParser parser, QueryParseContext parseContext, int level)
@ -173,10 +172,10 @@ public class AggregatorParsers {
throw new ParsingException(parser.getTokenLocation(), throw new ParsingException(parser.getTokenLocation(),
"Could not find aggregator type [" + fieldName + "] in [" + aggregationName + "]"); "Could not find aggregator type [" + fieldName + "] in [" + aggregationName + "]");
} else { } else {
pipelineAggregatorFactory = pipelineAggregatorParser.parse(aggregationName, parser, parseContext); pipelineAggregatorFactory = pipelineAggregatorParser.parse(aggregationName, parseContext);
} }
} else { } else {
aggFactory = aggregatorParser.parse(aggregationName, parser, parseContext); aggFactory = aggregatorParser.parse(aggregationName, parseContext);
} }
} }
} else { } else {

View File

@ -126,12 +126,12 @@ public class ChildrenAggregatorBuilder extends ValuesSourceAggregatorBuilder<Par
return builder; return builder;
} }
public static ChildrenAggregatorBuilder parse(String aggregationName, XContentParser parser, public static ChildrenAggregatorBuilder parse(String aggregationName, QueryParseContext context) throws IOException {
QueryParseContext context) throws IOException {
String childType = null; String childType = null;
XContentParser.Token token; XContentParser.Token token;
String currentFieldName = null; String currentFieldName = null;
XContentParser parser = context.parser();
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) { if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName(); currentFieldName = parser.currentName();

View File

@ -19,10 +19,10 @@
package org.elasticsearch.search.aggregations.bucket.filter; package org.elasticsearch.search.aggregations.bucket.filter;
import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.Aggregator;
import java.io.IOException; import java.io.IOException;
/** /**
@ -36,8 +36,7 @@ public class FilterParser implements Aggregator.Parser {
} }
@Override @Override
public FilterAggregatorBuilder parse(String aggregationName, XContentParser parser, QueryParseContext context) public FilterAggregatorBuilder parse(String aggregationName, QueryParseContext context) throws IOException {
throws IOException {
QueryBuilder<?> filter = context.parseInnerQueryBuilder(); QueryBuilder<?> filter = context.parseInnerQueryBuilder();
if (filter == null) { if (filter == null) {

View File

@ -53,8 +53,9 @@ public class FiltersParser implements Aggregator.Parser {
} }
@Override @Override
public FiltersAggregatorBuilder parse(String aggregationName, XContentParser parser, QueryParseContext context) public FiltersAggregatorBuilder parse(String aggregationName, QueryParseContext context)
throws IOException { throws IOException {
XContentParser parser = context.parser();
List<FiltersAggregator.KeyedFilter> keyedFilters = null; List<FiltersAggregator.KeyedFilter> keyedFilters = null;
List<QueryBuilder<?>> nonKeyedFilters = null; List<QueryBuilder<?>> nonKeyedFilters = null;

View File

@ -18,9 +18,9 @@
*/ */
package org.elasticsearch.search.aggregations.bucket.global; package org.elasticsearch.search.aggregations.bucket.global;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.Aggregator;
import java.io.IOException; import java.io.IOException;
/** /**
@ -34,9 +34,8 @@ public class GlobalParser implements Aggregator.Parser {
} }
@Override @Override
public GlobalAggregatorBuilder parse(String aggregationName, XContentParser parser, QueryParseContext context) public GlobalAggregatorBuilder parse(String aggregationName, QueryParseContext context) throws IOException {
throws IOException { context.parser().nextToken();
parser.nextToken();
return new GlobalAggregatorBuilder(aggregationName); return new GlobalAggregatorBuilder(aggregationName);
} }

View File

@ -22,6 +22,7 @@ import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.Aggregator;
import java.io.IOException; import java.io.IOException;
/** /**
@ -35,12 +36,12 @@ public class NestedParser implements Aggregator.Parser {
} }
@Override @Override
public NestedAggregatorBuilder parse(String aggregationName, XContentParser parser, QueryParseContext context) public NestedAggregatorBuilder parse(String aggregationName, QueryParseContext context) throws IOException {
throws IOException {
String path = null; String path = null;
XContentParser.Token token; XContentParser.Token token;
String currentFieldName = null; String currentFieldName = null;
XContentParser parser = context.parser();
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) { if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName(); currentFieldName = parser.currentName();

View File

@ -22,6 +22,7 @@ import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.Aggregator;
import java.io.IOException; import java.io.IOException;
/** /**
@ -35,12 +36,12 @@ public class ReverseNestedParser implements Aggregator.Parser {
} }
@Override @Override
public ReverseNestedAggregatorBuilder parse(String aggregationName, XContentParser parser, public ReverseNestedAggregatorBuilder parse(String aggregationName, QueryParseContext context) throws IOException {
QueryParseContext context) throws IOException {
String path = null; String path = null;
XContentParser.Token token; XContentParser.Token token;
String currentFieldName = null; String currentFieldName = null;
XContentParser parser = context.parser();
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) { if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName(); currentFieldName = parser.currentName();

View File

@ -23,6 +23,7 @@ import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.Aggregator;
import java.io.IOException; import java.io.IOException;
/** /**
@ -36,13 +37,13 @@ public class SamplerParser implements Aggregator.Parser {
} }
@Override @Override
public SamplerAggregatorBuilder parse(String aggregationName, XContentParser parser, QueryParseContext context) public SamplerAggregatorBuilder parse(String aggregationName, QueryParseContext context) throws IOException {
throws IOException {
XContentParser.Token token; XContentParser.Token token;
String currentFieldName = null; String currentFieldName = null;
Integer shardSize = null; Integer shardSize = null;
XContentParser parser = context.parser();
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) { if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName(); currentFieldName = parser.currentName();

View File

@ -0,0 +1,168 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.metrics.scripted;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptParameterParser;
import org.elasticsearch.script.ScriptParameterParser.ScriptParameterValue;
import org.elasticsearch.search.aggregations.Aggregator;
import java.io.IOException;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
/**
 * Parser for the {@code scripted_metric} aggregation.
 * <p>
 * Reads the four scripts ({@code init_script}, {@code map_script}, {@code combine_script},
 * {@code reduce_script}) either in the structured (new) script API form or, as a fallback,
 * from the legacy flat script parameters handled by {@link ScriptParameterParser}.
 * Only {@code map_script} is mandatory.
 */
public class ScriptedMetricParser implements Aggregator.Parser {

    public static final String INIT_SCRIPT = "init_script";
    public static final String MAP_SCRIPT = "map_script";
    public static final String COMBINE_SCRIPT = "combine_script";
    public static final String REDUCE_SCRIPT = "reduce_script";
    public static final ParseField INIT_SCRIPT_FIELD = new ParseField("init_script");
    public static final ParseField MAP_SCRIPT_FIELD = new ParseField("map_script");
    public static final ParseField COMBINE_SCRIPT_FIELD = new ParseField("combine_script");
    public static final ParseField REDUCE_SCRIPT_FIELD = new ParseField("reduce_script");
    public static final ParseField PARAMS_FIELD = new ParseField("params");
    public static final ParseField REDUCE_PARAMS_FIELD = new ParseField("reduce_params");
    public static final ParseField LANG_FIELD = new ParseField("lang");

    @Override
    public String type() {
        return InternalScriptedMetric.TYPE.name();
    }

    /**
     * Parses a {@code scripted_metric} aggregation definition.
     *
     * @param aggregationName the name given to this aggregation in the request
     * @param context the parse context; the parser is retrieved from it rather than
     *        passed separately so both always share the same state
     * @return the configured {@link ScriptedMetricAggregatorBuilder}
     * @throws IOException if reading from the underlying parser fails
     * @throws ParsingException on unknown keys, unexpected tokens, inline script params,
     *         or a missing {@code map_script}
     */
    @Override
    public ScriptedMetricAggregatorBuilder parse(String aggregationName, QueryParseContext context) throws IOException {
        Script initScript = null;
        Script mapScript = null;
        Script combineScript = null;
        Script reduceScript = null;
        Map<String, Object> params = null;
        Map<String, Object> reduceParams = null;
        XContentParser.Token token;
        String currentFieldName = null;
        // Register the legacy flat parameter names so ScriptParameterParser can pick them up.
        Set<String> scriptParameters = new HashSet<>();
        scriptParameters.add(INIT_SCRIPT);
        scriptParameters.add(MAP_SCRIPT);
        scriptParameters.add(COMBINE_SCRIPT);
        scriptParameters.add(REDUCE_SCRIPT);
        ScriptParameterParser scriptParameterParser = new ScriptParameterParser(scriptParameters);
        XContentParser parser = context.parser();
        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
            if (token == XContentParser.Token.FIELD_NAME) {
                currentFieldName = parser.currentName();
            } else if (token == XContentParser.Token.START_OBJECT) {
                // Structured (new API) script objects and the params maps.
                if (context.parseFieldMatcher().match(currentFieldName, INIT_SCRIPT_FIELD)) {
                    initScript = Script.parse(parser, context.parseFieldMatcher());
                } else if (context.parseFieldMatcher().match(currentFieldName, MAP_SCRIPT_FIELD)) {
                    mapScript = Script.parse(parser, context.parseFieldMatcher());
                } else if (context.parseFieldMatcher().match(currentFieldName, COMBINE_SCRIPT_FIELD)) {
                    combineScript = Script.parse(parser, context.parseFieldMatcher());
                } else if (context.parseFieldMatcher().match(currentFieldName, REDUCE_SCRIPT_FIELD)) {
                    reduceScript = Script.parse(parser, context.parseFieldMatcher());
                } else if (context.parseFieldMatcher().match(currentFieldName, PARAMS_FIELD)) {
                    params = parser.map();
                } else if (context.parseFieldMatcher().match(currentFieldName, REDUCE_PARAMS_FIELD)) {
                    reduceParams = parser.map();
                } else {
                    throw unknownKeyException(parser, token, aggregationName, currentFieldName);
                }
            } else if (token.isValue()) {
                // Legacy flat script parameters (e.g. "map_script": "..."), consumed by the helper parser.
                if (!scriptParameterParser.token(currentFieldName, token, parser, context.parseFieldMatcher())) {
                    throw unknownKeyException(parser, token, aggregationName, currentFieldName);
                }
            } else {
                throw new ParsingException(parser.getTokenLocation(), "Unexpected token " + token + " in [" + aggregationName + "].");
            }
        }

        // For each script: fall back to the legacy flat form when the structured form was absent;
        // otherwise reject inline params, which must be given via the top-level params field.
        if (initScript == null) { // Didn't find anything using the new API so try using the old one instead
            initScript = legacyScript(scriptParameterParser, INIT_SCRIPT, params);
        } else if (initScript.getParams() != null) {
            throw new ParsingException(parser.getTokenLocation(),
                    "init_script params are not supported. Parameters for the init_script must be specified in the params field on the scripted_metric aggregator not inside the init_script object");
        }
        if (mapScript == null) { // Didn't find anything using the new API so try using the old one instead
            mapScript = legacyScript(scriptParameterParser, MAP_SCRIPT, params);
        } else if (mapScript.getParams() != null) {
            throw new ParsingException(parser.getTokenLocation(),
                    "map_script params are not supported. Parameters for the map_script must be specified in the params field on the scripted_metric aggregator not inside the map_script object");
        }
        if (combineScript == null) { // Didn't find anything using the new API so try using the old one instead
            combineScript = legacyScript(scriptParameterParser, COMBINE_SCRIPT, params);
        } else if (combineScript.getParams() != null) {
            throw new ParsingException(parser.getTokenLocation(),
                    "combine_script params are not supported. Parameters for the combine_script must be specified in the params field on the scripted_metric aggregator not inside the combine_script object");
        }
        if (reduceScript == null) { // Didn't find anything using the new API so try using the old one instead
            // Note: the reduce script takes its params from reduce_params, not params.
            reduceScript = legacyScript(scriptParameterParser, REDUCE_SCRIPT, reduceParams);
        }
        if (mapScript == null) {
            throw new ParsingException(parser.getTokenLocation(), "map_script field is required in [" + aggregationName + "].");
        }

        ScriptedMetricAggregatorBuilder factory = new ScriptedMetricAggregatorBuilder(aggregationName);
        if (initScript != null) {
            factory.initScript(initScript);
        }
        if (mapScript != null) {
            factory.mapScript(mapScript);
        }
        if (combineScript != null) {
            factory.combineScript(combineScript);
        }
        if (reduceScript != null) {
            factory.reduceScript(reduceScript);
        }
        if (params != null) {
            factory.params(params);
        }
        return factory;
    }

    /**
     * Builds the standard "unknown key" error, previously duplicated at every rejection site.
     */
    private static ParsingException unknownKeyException(XContentParser parser, XContentParser.Token token, String aggregationName,
            String currentFieldName) {
        return new ParsingException(parser.getTokenLocation(),
                "Unknown key for a " + token + " in [" + aggregationName + "]: [" + currentFieldName + "].");
    }

    /**
     * Resolves a script supplied via the legacy flat parameter syntax, or {@code null} if absent.
     *
     * @param scriptParameterParser the parser that collected the legacy flat script values
     * @param parameterName the legacy parameter name (e.g. {@link #MAP_SCRIPT})
     * @param params the params map to attach to the resulting script (may be {@code null})
     */
    private static Script legacyScript(ScriptParameterParser scriptParameterParser, String parameterName, Map<String, Object> params) {
        ScriptParameterValue scriptValue = scriptParameterParser.getScriptParameterValue(parameterName);
        if (scriptValue == null) {
            return null;
        }
        return new Script(scriptValue.script(), scriptValue.scriptType(), scriptParameterParser.lang(), params);
    }

    @Override
    public ScriptedMetricAggregatorBuilder getFactoryPrototypes() {
        return ScriptedMetricAggregatorBuilder.PROTOTYPE;
    }
}

View File

@ -44,11 +44,11 @@ public class TopHitsParser implements Aggregator.Parser {
} }
@Override @Override
public TopHitsAggregatorBuilder parse(String aggregationName, XContentParser parser, QueryParseContext context) public TopHitsAggregatorBuilder parse(String aggregationName, QueryParseContext context) throws IOException {
throws IOException {
TopHitsAggregatorBuilder factory = new TopHitsAggregatorBuilder(aggregationName); TopHitsAggregatorBuilder factory = new TopHitsAggregatorBuilder(aggregationName);
XContentParser.Token token; XContentParser.Token token;
String currentFieldName = null; String currentFieldName = null;
XContentParser parser = context.parser();
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) { if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName(); currentFieldName = parser.currentName();
@ -64,7 +64,7 @@ public class TopHitsParser implements Aggregator.Parser {
} else if (context.parseFieldMatcher().match(currentFieldName, SearchSourceBuilder.TRACK_SCORES_FIELD)) { } else if (context.parseFieldMatcher().match(currentFieldName, SearchSourceBuilder.TRACK_SCORES_FIELD)) {
factory.trackScores(parser.booleanValue()); factory.trackScores(parser.booleanValue());
} else if (context.parseFieldMatcher().match(currentFieldName, SearchSourceBuilder._SOURCE_FIELD)) { } else if (context.parseFieldMatcher().match(currentFieldName, SearchSourceBuilder._SOURCE_FIELD)) {
factory.fetchSource(FetchSourceContext.parse(parser, context)); factory.fetchSource(FetchSourceContext.parse(context));
} else if (context.parseFieldMatcher().match(currentFieldName, SearchSourceBuilder.FIELDS_FIELD)) { } else if (context.parseFieldMatcher().match(currentFieldName, SearchSourceBuilder.FIELDS_FIELD)) {
List<String> fieldNames = new ArrayList<>(); List<String> fieldNames = new ArrayList<>();
fieldNames.add(parser.text()); fieldNames.add(parser.text());
@ -77,7 +77,7 @@ public class TopHitsParser implements Aggregator.Parser {
} }
} else if (token == XContentParser.Token.START_OBJECT) { } else if (token == XContentParser.Token.START_OBJECT) {
if (context.parseFieldMatcher().match(currentFieldName, SearchSourceBuilder._SOURCE_FIELD)) { if (context.parseFieldMatcher().match(currentFieldName, SearchSourceBuilder._SOURCE_FIELD)) {
factory.fetchSource(FetchSourceContext.parse(parser, context)); factory.fetchSource(FetchSourceContext.parse(context));
} else if (context.parseFieldMatcher().match(currentFieldName, SearchSourceBuilder.SCRIPT_FIELDS_FIELD)) { } else if (context.parseFieldMatcher().match(currentFieldName, SearchSourceBuilder.SCRIPT_FIELDS_FIELD)) {
List<ScriptField> scriptFields = new ArrayList<>(); List<ScriptField> scriptFields = new ArrayList<>();
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
@ -157,7 +157,7 @@ public class TopHitsParser implements Aggregator.Parser {
List<SortBuilder<?>> sorts = SortBuilder.fromXContent(context); List<SortBuilder<?>> sorts = SortBuilder.fromXContent(context);
factory.sorts(sorts); factory.sorts(sorts);
} else if (context.parseFieldMatcher().match(currentFieldName, SearchSourceBuilder._SOURCE_FIELD)) { } else if (context.parseFieldMatcher().match(currentFieldName, SearchSourceBuilder._SOURCE_FIELD)) {
factory.fetchSource(FetchSourceContext.parse(parser, context)); factory.fetchSource(FetchSourceContext.parse(context));
} else { } else {
throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token + " in [" + currentFieldName + "].", throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token + " in [" + currentFieldName + "].",
parser.getTokenLocation()); parser.getTokenLocation());

View File

@ -24,7 +24,6 @@ import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable; import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.InternalAggregation.ReduceContext; import org.elasticsearch.search.aggregations.InternalAggregation.ReduceContext;
@ -59,15 +58,13 @@ public abstract class PipelineAggregator implements Streamable {
* *
* @param pipelineAggregatorName * @param pipelineAggregatorName
* The name of the pipeline aggregation * The name of the pipeline aggregation
* @param parser
* The xcontent parser
* @param context * @param context
* The search context * The search context
* @return The resolved pipeline aggregator factory * @return The resolved pipeline aggregator factory
* @throws java.io.IOException * @throws java.io.IOException
* When parsing fails * When parsing fails
*/ */
PipelineAggregatorBuilder<?> parse(String pipelineAggregatorName, XContentParser parser, QueryParseContext context) PipelineAggregatorBuilder<?> parse(String pipelineAggregatorName, QueryParseContext context)
throws IOException; throws IOException;
/** /**

View File

@ -25,6 +25,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import java.io.IOException; import java.io.IOException;
import java.text.ParseException; import java.text.ParseException;
import java.util.ArrayList; import java.util.ArrayList;
@ -44,8 +45,9 @@ public abstract class BucketMetricsParser implements PipelineAggregator.Parser {
} }
@Override @Override
public final BucketMetricsPipelineAggregatorBuilder<?> parse(String pipelineAggregatorName, XContentParser parser, public final BucketMetricsPipelineAggregatorBuilder<?> parse(String pipelineAggregatorName, QueryParseContext context)
QueryParseContext context) throws IOException { throws IOException {
XContentParser parser = context.parser();
XContentParser.Token token; XContentParser.Token token;
String currentFieldName = null; String currentFieldName = null;
String[] bucketsPaths = null; String[] bucketsPaths = null;

View File

@ -46,8 +46,8 @@ public class BucketScriptParser implements PipelineAggregator.Parser {
} }
@Override @Override
public BucketScriptPipelineAggregatorBuilder parse(String reducerName, XContentParser parser, public BucketScriptPipelineAggregatorBuilder parse(String reducerName, QueryParseContext context) throws IOException {
QueryParseContext context) throws IOException { XContentParser parser = context.parser();
XContentParser.Token token; XContentParser.Token token;
Script script = null; Script script = null;
String currentFieldName = null; String currentFieldName = null;

View File

@ -46,8 +46,8 @@ public class BucketSelectorParser implements PipelineAggregator.Parser {
} }
@Override @Override
public BucketSelectorPipelineAggregatorBuilder parse(String reducerName, XContentParser parser, public BucketSelectorPipelineAggregatorBuilder parse(String reducerName, QueryParseContext context) throws IOException {
QueryParseContext context) throws IOException { XContentParser parser = context.parser();
XContentParser.Token token; XContentParser.Token token;
Script script = null; Script script = null;
String currentFieldName = null; String currentFieldName = null;

View File

@ -24,6 +24,7 @@ import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import java.io.IOException; import java.io.IOException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
@ -40,8 +41,8 @@ public class CumulativeSumParser implements PipelineAggregator.Parser {
} }
@Override @Override
public CumulativeSumPipelineAggregatorBuilder parse(String pipelineAggregatorName, public CumulativeSumPipelineAggregatorBuilder parse(String pipelineAggregatorName, QueryParseContext context) throws IOException {
XContentParser parser, QueryParseContext context) throws IOException { XContentParser parser = context.parser();
XContentParser.Token token; XContentParser.Token token;
String currentFieldName = null; String currentFieldName = null;
String[] bucketsPaths = null; String[] bucketsPaths = null;

View File

@ -25,6 +25,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import java.io.IOException; import java.io.IOException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
@ -41,8 +42,8 @@ public class DerivativeParser implements PipelineAggregator.Parser {
} }
@Override @Override
public DerivativePipelineAggregatorBuilder parse(String pipelineAggregatorName, XContentParser parser, public DerivativePipelineAggregatorBuilder parse(String pipelineAggregatorName, QueryParseContext context) throws IOException {
QueryParseContext context) throws IOException { XContentParser parser = context.parser();
XContentParser.Token token; XContentParser.Token token;
String currentFieldName = null; String currentFieldName = null;
String[] bucketsPaths = null; String[] bucketsPaths = null;

View File

@ -56,8 +56,8 @@ public class MovAvgParser implements PipelineAggregator.Parser {
} }
@Override @Override
public MovAvgPipelineAggregatorBuilder parse(String pipelineAggregatorName, XContentParser parser, public MovAvgPipelineAggregatorBuilder parse(String pipelineAggregatorName, QueryParseContext context) throws IOException {
QueryParseContext context) throws IOException { XContentParser parser = context.parser();
XContentParser.Token token; XContentParser.Token token;
String currentFieldName = null; String currentFieldName = null;
String[] bucketsPaths = null; String[] bucketsPaths = null;

View File

@ -25,6 +25,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import java.io.IOException; import java.io.IOException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
@ -41,8 +42,8 @@ public class SerialDiffParser implements PipelineAggregator.Parser {
} }
@Override @Override
public SerialDiffPipelineAggregatorBuilder parse(String reducerName, XContentParser parser, public SerialDiffPipelineAggregatorBuilder parse(String reducerName, QueryParseContext context) throws IOException {
QueryParseContext context) throws IOException { XContentParser parser = context.parser();
XContentParser.Token token; XContentParser.Token token;
String currentFieldName = null; String currentFieldName = null;
String[] bucketsPaths = null; String[] bucketsPaths = null;

View File

@ -84,9 +84,10 @@ public abstract class AbstractValuesSourceParser<VS extends ValuesSource>
} }
@Override @Override
public final ValuesSourceAggregatorBuilder<VS, ?> parse(String aggregationName, XContentParser parser, QueryParseContext context) public final ValuesSourceAggregatorBuilder<VS, ?> parse(String aggregationName, QueryParseContext context)
throws IOException { throws IOException {
XContentParser parser = context.parser();
String field = null; String field = null;
Script script = null; Script script = null;
ValueType valueType = null; ValueType valueType = null;

View File

@ -101,10 +101,10 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
public static final ParseField PROFILE_FIELD = new ParseField("profile"); public static final ParseField PROFILE_FIELD = new ParseField("profile");
public static final ParseField SEARCH_AFTER = new ParseField("search_after"); public static final ParseField SEARCH_AFTER = new ParseField("search_after");
public static SearchSourceBuilder fromXContent(XContentParser parser, QueryParseContext context, AggregatorParsers aggParsers, public static SearchSourceBuilder fromXContent(QueryParseContext context, AggregatorParsers aggParsers,
Suggesters suggesters) throws IOException { Suggesters suggesters) throws IOException {
SearchSourceBuilder builder = new SearchSourceBuilder(); SearchSourceBuilder builder = new SearchSourceBuilder();
builder.parseXContent(parser, context, aggParsers, suggesters); builder.parseXContent(context, aggParsers, suggesters);
return builder; return builder;
} }
@ -979,11 +979,12 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
/** /**
* Parse some xContent into this SearchSourceBuilder, overwriting any values specified in the xContent. Use this if you need to set up * Parse some xContent into this SearchSourceBuilder, overwriting any values specified in the xContent. Use this if you need to set up
* different defaults than a regular SearchSourceBuilder would have and use * different defaults than a regular SearchSourceBuilder would have and use
* {@link #fromXContent(XContentParser, QueryParseContext, AggregatorParsers, Suggesters)} if you have normal defaults. * {@link #fromXContent(QueryParseContext, AggregatorParsers, Suggesters)} if you have normal defaults.
*/ */
public void parseXContent(XContentParser parser, QueryParseContext context, AggregatorParsers aggParsers, Suggesters suggesters) public void parseXContent(QueryParseContext context, AggregatorParsers aggParsers, Suggesters suggesters)
throws IOException { throws IOException {
XContentParser parser = context.parser();
XContentParser.Token token = parser.currentToken(); XContentParser.Token token = parser.currentToken();
String currentFieldName = null; String currentFieldName = null;
if (token != XContentParser.Token.START_OBJECT && (token = parser.nextToken()) != XContentParser.Token.START_OBJECT) { if (token != XContentParser.Token.START_OBJECT && (token = parser.nextToken()) != XContentParser.Token.START_OBJECT) {
@ -1011,7 +1012,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
} else if (context.parseFieldMatcher().match(currentFieldName, TRACK_SCORES_FIELD)) { } else if (context.parseFieldMatcher().match(currentFieldName, TRACK_SCORES_FIELD)) {
trackScores = parser.booleanValue(); trackScores = parser.booleanValue();
} else if (context.parseFieldMatcher().match(currentFieldName, _SOURCE_FIELD)) { } else if (context.parseFieldMatcher().match(currentFieldName, _SOURCE_FIELD)) {
fetchSourceContext = FetchSourceContext.parse(parser, context); fetchSourceContext = FetchSourceContext.parse(context);
} else if (context.parseFieldMatcher().match(currentFieldName, FIELDS_FIELD)) { } else if (context.parseFieldMatcher().match(currentFieldName, FIELDS_FIELD)) {
field(parser.text()); field(parser.text());
} else if (context.parseFieldMatcher().match(currentFieldName, SORT_FIELD)) { } else if (context.parseFieldMatcher().match(currentFieldName, SORT_FIELD)) {
@ -1028,11 +1029,11 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
} else if (context.parseFieldMatcher().match(currentFieldName, POST_FILTER_FIELD)) { } else if (context.parseFieldMatcher().match(currentFieldName, POST_FILTER_FIELD)) {
postQueryBuilder = context.parseInnerQueryBuilder(); postQueryBuilder = context.parseInnerQueryBuilder();
} else if (context.parseFieldMatcher().match(currentFieldName, _SOURCE_FIELD)) { } else if (context.parseFieldMatcher().match(currentFieldName, _SOURCE_FIELD)) {
fetchSourceContext = FetchSourceContext.parse(parser, context); fetchSourceContext = FetchSourceContext.parse(context);
} else if (context.parseFieldMatcher().match(currentFieldName, SCRIPT_FIELDS_FIELD)) { } else if (context.parseFieldMatcher().match(currentFieldName, SCRIPT_FIELDS_FIELD)) {
scriptFields = new ArrayList<>(); scriptFields = new ArrayList<>();
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
scriptFields.add(new ScriptField(parser, context)); scriptFields.add(new ScriptField(context));
} }
} else if (context.parseFieldMatcher().match(currentFieldName, INDICES_BOOST_FIELD)) { } else if (context.parseFieldMatcher().match(currentFieldName, INDICES_BOOST_FIELD)) {
indexBoost = new ObjectFloatHashMap<String>(); indexBoost = new ObjectFloatHashMap<String>();
@ -1047,11 +1048,11 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
} }
} }
} else if (context.parseFieldMatcher().match(currentFieldName, AGGREGATIONS_FIELD)) { } else if (context.parseFieldMatcher().match(currentFieldName, AGGREGATIONS_FIELD)) {
aggregations = aggParsers.parseAggregators(parser, context); aggregations = aggParsers.parseAggregators(context);
} else if (context.parseFieldMatcher().match(currentFieldName, HIGHLIGHT_FIELD)) { } else if (context.parseFieldMatcher().match(currentFieldName, HIGHLIGHT_FIELD)) {
highlightBuilder = HighlightBuilder.fromXContent(context); highlightBuilder = HighlightBuilder.fromXContent(context);
} else if (context.parseFieldMatcher().match(currentFieldName, INNER_HITS_FIELD)) { } else if (context.parseFieldMatcher().match(currentFieldName, INNER_HITS_FIELD)) {
innerHitsBuilder = InnerHitsBuilder.fromXContent(parser, context); innerHitsBuilder = InnerHitsBuilder.fromXContent(context);
} else if (context.parseFieldMatcher().match(currentFieldName, SUGGEST_FIELD)) { } else if (context.parseFieldMatcher().match(currentFieldName, SUGGEST_FIELD)) {
suggestBuilder = SuggestBuilder.fromXContent(context, suggesters); suggestBuilder = SuggestBuilder.fromXContent(context, suggesters);
} else if (context.parseFieldMatcher().match(currentFieldName, SORT_FIELD)) { } else if (context.parseFieldMatcher().match(currentFieldName, SORT_FIELD)) {
@ -1103,7 +1104,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
} }
} }
} else if (context.parseFieldMatcher().match(currentFieldName, _SOURCE_FIELD)) { } else if (context.parseFieldMatcher().match(currentFieldName, _SOURCE_FIELD)) {
fetchSourceContext = FetchSourceContext.parse(parser, context); fetchSourceContext = FetchSourceContext.parse(context);
} else if (context.parseFieldMatcher().match(currentFieldName, SEARCH_AFTER)) { } else if (context.parseFieldMatcher().match(currentFieldName, SEARCH_AFTER)) {
searchAfterBuilder = SearchAfterBuilder.PROTOTYPE.fromXContent(parser, context.parseFieldMatcher()); searchAfterBuilder = SearchAfterBuilder.PROTOTYPE.fromXContent(parser, context.parseFieldMatcher());
} else { } else {
@ -1290,8 +1291,9 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
out.writeBoolean(ignoreFailure); out.writeBoolean(ignoreFailure);
} }
public ScriptField(XContentParser parser, QueryParseContext context) throws IOException { public ScriptField(QueryParseContext context) throws IOException {
boolean ignoreFailure = false; boolean ignoreFailure = false;
XContentParser parser = context.parser();
String scriptFieldName = parser.currentName(); String scriptFieldName = parser.currentName();
Script script = null; Script script = null;
@ -1373,6 +1375,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
} }
} }
@Override
public int hashCode() { public int hashCode() {
return Objects.hash(aggregations, explain, fetchSourceContext, fieldDataFields, fieldNames, from, return Objects.hash(aggregations, explain, fetchSourceContext, fieldDataFields, fieldNames, from,
highlightBuilder, indexBoost, innerHitsBuilder, minScore, postQueryBuilder, queryBuilder, rescoreBuilders, scriptFields, highlightBuilder, indexBoost, innerHitsBuilder, minScore, postQueryBuilder, queryBuilder, rescoreBuilders, scriptFields,

View File

@ -50,9 +50,9 @@ public class FetchSourceContext implements Streamable, ToXContent {
private String[] includes; private String[] includes;
private String[] excludes; private String[] excludes;
public static FetchSourceContext parse(XContentParser parser, QueryParseContext context) throws IOException { public static FetchSourceContext parse(QueryParseContext context) throws IOException {
FetchSourceContext fetchSourceContext = new FetchSourceContext(); FetchSourceContext fetchSourceContext = new FetchSourceContext();
fetchSourceContext.fromXContent(parser, context); fetchSourceContext.fromXContent(context);
return fetchSourceContext; return fetchSourceContext;
} }
@ -147,7 +147,8 @@ public class FetchSourceContext implements Streamable, ToXContent {
return null; return null;
} }
public void fromXContent(XContentParser parser, QueryParseContext context) throws IOException { public void fromXContent(QueryParseContext context) throws IOException {
XContentParser parser = context.parser();
XContentParser.Token token = parser.currentToken(); XContentParser.Token token = parser.currentToken();
boolean fetchSource = true; boolean fetchSource = true;
String[] includes = Strings.EMPTY_ARRAY; String[] includes = Strings.EMPTY_ARRAY;

View File

@ -21,18 +21,12 @@ package org.elasticsearch.search.highlight;
import org.apache.lucene.search.Query; import org.apache.lucene.search.Query;
import org.apache.lucene.search.vectorhighlight.SimpleBoundaryScanner; import org.apache.lucene.search.vectorhighlight.SimpleBoundaryScanner;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser.NamedObjectParser; import org.elasticsearch.common.xcontent.ObjectParser.NamedObjectParser;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilder;

View File

@ -93,7 +93,7 @@ public abstract class SortBuilder<T extends SortBuilder<?>> extends ToXContentTo
if (token == XContentParser.Token.START_ARRAY) { if (token == XContentParser.Token.START_ARRAY) {
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
if (token == XContentParser.Token.START_OBJECT) { if (token == XContentParser.Token.START_OBJECT) {
parseCompoundSortField(parser, context, sortFields); parseCompoundSortField(context, sortFields);
} else if (token == XContentParser.Token.VALUE_STRING) { } else if (token == XContentParser.Token.VALUE_STRING) {
String fieldName = parser.text(); String fieldName = parser.text();
sortFields.add(fieldOrScoreSort(fieldName)); sortFields.add(fieldOrScoreSort(fieldName));
@ -106,7 +106,7 @@ public abstract class SortBuilder<T extends SortBuilder<?>> extends ToXContentTo
String fieldName = parser.text(); String fieldName = parser.text();
sortFields.add(fieldOrScoreSort(fieldName)); sortFields.add(fieldOrScoreSort(fieldName));
} else if (token == XContentParser.Token.START_OBJECT) { } else if (token == XContentParser.Token.START_OBJECT) {
parseCompoundSortField(parser, context, sortFields); parseCompoundSortField(context, sortFields);
} else { } else {
throw new IllegalArgumentException("malformed sort format, either start with array, object, or an actual string"); throw new IllegalArgumentException("malformed sort format, either start with array, object, or an actual string");
} }
@ -121,9 +121,10 @@ public abstract class SortBuilder<T extends SortBuilder<?>> extends ToXContentTo
} }
} }
private static void parseCompoundSortField(XContentParser parser, QueryParseContext context, List<SortBuilder<?>> sortFields) private static void parseCompoundSortField(QueryParseContext context, List<SortBuilder<?>> sortFields)
throws IOException { throws IOException {
XContentParser.Token token; XContentParser.Token token;
XContentParser parser = context.parser();
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) { if (token == XContentParser.Token.FIELD_NAME) {
String fieldName = parser.currentName(); String fieldName = parser.currentName();

View File

@ -35,7 +35,6 @@ import org.elasticsearch.common.lucene.search.function.FiltersFunctionScoreQuery
import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery; import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery;
import org.elasticsearch.common.lucene.search.function.WeightFactorFunction; import org.elasticsearch.common.lucene.search.function.WeightFactorFunction;
import org.elasticsearch.common.unit.DistanceUnit; import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.AbstractQueryBuilder; import org.elasticsearch.index.query.AbstractQueryBuilder;
import org.elasticsearch.index.query.AbstractQueryTestCase; import org.elasticsearch.index.query.AbstractQueryTestCase;
@ -725,9 +724,9 @@ public class FunctionScoreQueryBuilderTests extends AbstractQueryTestCase<Functi
return NAME; return NAME;
} }
public static RandomScoreFunctionBuilder fromXContent(QueryParseContext parseContext, XContentParser parser) public static RandomScoreFunctionBuilder fromXContent(QueryParseContext parseContext)
throws IOException, ParsingException { throws IOException, ParsingException {
RandomScoreFunctionBuilder builder = RandomScoreFunctionBuilder.fromXContent(parseContext, parser); RandomScoreFunctionBuilder builder = RandomScoreFunctionBuilder.fromXContent(parseContext);
RandomScoreFunctionBuilderWithFixedSeed replacement = new RandomScoreFunctionBuilderWithFixedSeed(); RandomScoreFunctionBuilderWithFixedSeed replacement = new RandomScoreFunctionBuilderWithFixedSeed();
replacement.seed(builder.getSeed()); replacement.seed(builder.getSeed());
return replacement; return replacement;

View File

@ -30,7 +30,6 @@ import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.AbstractQueryTestCase;
import org.elasticsearch.index.query.MatchQueryBuilder; import org.elasticsearch.index.query.MatchQueryBuilder;
import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.indices.query.IndicesQueriesRegistry;
@ -40,8 +39,6 @@ import org.elasticsearch.search.SearchModule;
import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.fetch.source.FetchSourceContext; import org.elasticsearch.search.fetch.source.FetchSourceContext;
import org.elasticsearch.search.highlight.HighlightBuilderTests; import org.elasticsearch.search.highlight.HighlightBuilderTests;
import org.elasticsearch.search.sort.FieldSortBuilder;
import org.elasticsearch.search.sort.ScriptSortBuilder;
import org.elasticsearch.search.sort.SortBuilder; import org.elasticsearch.search.sort.SortBuilder;
import org.elasticsearch.search.sort.SortBuilders; import org.elasticsearch.search.sort.SortBuilders;
import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.search.sort.SortOrder;
@ -101,7 +98,7 @@ public class InnerHitBuilderTests extends ESTestCase {
XContentParser parser = XContentHelper.createParser(builder.bytes()); XContentParser parser = XContentHelper.createParser(builder.bytes());
context.reset(parser); context.reset(parser);
InnerHitBuilder secondInnerHits = InnerHitBuilder.fromXContent(parser, context); InnerHitBuilder secondInnerHits = InnerHitBuilder.fromXContent(context);
assertThat(innerHit, not(sameInstance(secondInnerHits))); assertThat(innerHit, not(sameInstance(secondInnerHits)));
assertThat(innerHit, equalTo(secondInnerHits)); assertThat(innerHit, equalTo(secondInnerHits));
assertThat(innerHit.hashCode(), equalTo(secondInnerHits.hashCode())); assertThat(innerHit.hashCode(), equalTo(secondInnerHits.hashCode()));

View File

@ -30,29 +30,14 @@ import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.MatchQueryBuilder;
import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.SearchModule; import org.elasticsearch.search.SearchModule;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.fetch.source.FetchSourceContext;
import org.elasticsearch.search.highlight.HighlightBuilderTests;
import org.elasticsearch.search.sort.FieldSortBuilder;
import org.elasticsearch.search.sort.SortBuilder;
import org.elasticsearch.search.sort.SortBuilders;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase;
import org.junit.AfterClass; import org.junit.AfterClass;
import org.junit.BeforeClass; import org.junit.BeforeClass;
import java.io.IOException; import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Supplier;
import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.not;
@ -100,7 +85,7 @@ public class InnerHitsBuilderTests extends ESTestCase {
XContentParser parser = XContentHelper.createParser(builder.bytes()); XContentParser parser = XContentHelper.createParser(builder.bytes());
context.reset(parser); context.reset(parser);
parser.nextToken(); parser.nextToken();
InnerHitsBuilder secondInnerHits = InnerHitsBuilder.fromXContent(parser, context); InnerHitsBuilder secondInnerHits = InnerHitsBuilder.fromXContent(context);
assertThat(innerHits, not(sameInstance(secondInnerHits))); assertThat(innerHits, not(sameInstance(secondInnerHits)));
assertThat(innerHits, equalTo(secondInnerHits)); assertThat(innerHits, equalTo(secondInnerHits));
assertThat(innerHits.hashCode(), equalTo(secondInnerHits.hashCode())); assertThat(innerHits.hashCode(), equalTo(secondInnerHits.hashCode()));

View File

@ -69,7 +69,7 @@ public class AggregationCollectorTests extends ESSingleNodeTestCase {
aggParser.nextToken(); aggParser.nextToken();
SearchContext searchContext = createSearchContext(index); SearchContext searchContext = createSearchContext(index);
AggregationContext aggContext = new AggregationContext(searchContext); AggregationContext aggContext = new AggregationContext(searchContext);
final AggregatorFactories factories = parser.parseAggregators(aggParser, parseContext).build(aggContext, null); final AggregatorFactories factories = parser.parseAggregators(parseContext).build(aggContext, null);
final Aggregator[] aggregators = factories.createTopLevelAggregators(); final Aggregator[] aggregators = factories.createTopLevelAggregators();
assertEquals(1, aggregators.length); assertEquals(1, aggregators.length);
return aggregators[0].needsScores(); return aggregators[0].needsScores();

View File

@ -214,7 +214,7 @@ public class AggregatorParsingTests extends ESTestCase {
parseContext.reset(parser); parseContext.reset(parser);
parseContext.parseFieldMatcher(parseFieldMatcher); parseContext.parseFieldMatcher(parseFieldMatcher);
assertSame(XContentParser.Token.START_OBJECT, parser.nextToken()); assertSame(XContentParser.Token.START_OBJECT, parser.nextToken());
aggParsers.parseAggregators(parser, parseContext); aggParsers.parseAggregators(parseContext);
fail(); fail();
} catch (ParsingException e) { } catch (ParsingException e) {
assertThat(e.toString(), containsString("Found two aggregation type definitions in [in_stock]: [filter] and [terms]")); assertThat(e.toString(), containsString("Found two aggregation type definitions in [in_stock]: [filter] and [terms]"));
@ -250,7 +250,7 @@ public class AggregatorParsingTests extends ESTestCase {
parseContext.reset(parser); parseContext.reset(parser);
parseContext.parseFieldMatcher(parseFieldMatcher); parseContext.parseFieldMatcher(parseFieldMatcher);
assertSame(XContentParser.Token.START_OBJECT, parser.nextToken()); assertSame(XContentParser.Token.START_OBJECT, parser.nextToken());
aggParsers.parseAggregators(parser, parseContext); aggParsers.parseAggregators(parseContext);
fail(); fail();
} catch (ParsingException e) { } catch (ParsingException e) {
assertThat(e.toString(), containsString("Found two sub aggregation definitions under [by_date]")); assertThat(e.toString(), containsString("Found two sub aggregation definitions under [by_date]"));
@ -290,7 +290,7 @@ public class AggregatorParsingTests extends ESTestCase {
parseContext.reset(parser); parseContext.reset(parser);
parseContext.parseFieldMatcher(parseFieldMatcher); parseContext.parseFieldMatcher(parseFieldMatcher);
assertSame(XContentParser.Token.START_OBJECT, parser.nextToken()); assertSame(XContentParser.Token.START_OBJECT, parser.nextToken());
aggParsers.parseAggregators(parser, parseContext); aggParsers.parseAggregators(parseContext);
fail(); fail();
} catch (ParsingException e) { } catch (ParsingException e) {
assertThat(e.toString(), containsString("Invalid aggregation name [" + name + "]")); assertThat(e.toString(), containsString("Invalid aggregation name [" + name + "]"));
@ -318,7 +318,7 @@ public class AggregatorParsingTests extends ESTestCase {
parseContext.reset(parser); parseContext.reset(parser);
parseContext.parseFieldMatcher(parseFieldMatcher); parseContext.parseFieldMatcher(parseFieldMatcher);
assertSame(XContentParser.Token.START_OBJECT, parser.nextToken()); assertSame(XContentParser.Token.START_OBJECT, parser.nextToken());
aggParsers.parseAggregators(parser, parseContext); aggParsers.parseAggregators(parseContext);
fail(); fail();
} catch (IllegalArgumentException e) { } catch (IllegalArgumentException e) {
assertThat(e.toString(), containsString("Two sibling aggregations cannot have the same name: [" + name + "]")); assertThat(e.toString(), containsString("Two sibling aggregations cannot have the same name: [" + name + "]"));
@ -348,7 +348,7 @@ public class AggregatorParsingTests extends ESTestCase {
parseContext.reset(parser); parseContext.reset(parser);
parseContext.parseFieldMatcher(parseFieldMatcher); parseContext.parseFieldMatcher(parseFieldMatcher);
assertSame(XContentParser.Token.START_OBJECT, parser.nextToken()); assertSame(XContentParser.Token.START_OBJECT, parser.nextToken());
aggParsers.parseAggregators(parser, parseContext); aggParsers.parseAggregators(parseContext);
fail(); fail();
} catch (ParsingException e) { } catch (ParsingException e) {
// All Good // All Good
@ -378,7 +378,7 @@ public class AggregatorParsingTests extends ESTestCase {
parseContext.reset(parser); parseContext.reset(parser);
parseContext.parseFieldMatcher(parseFieldMatcher); parseContext.parseFieldMatcher(parseFieldMatcher);
assertSame(XContentParser.Token.START_OBJECT, parser.nextToken()); assertSame(XContentParser.Token.START_OBJECT, parser.nextToken());
aggParsers.parseAggregators(parser, parseContext); aggParsers.parseAggregators(parseContext);
fail(); fail();
} catch (ParsingException e) { } catch (ParsingException e) {
// All Good // All Good

View File

@ -235,7 +235,7 @@ public abstract class BaseAggregationTestCase<AB extends AggregatorBuilder<AB>>
assertSame(XContentParser.Token.FIELD_NAME, parser.nextToken()); assertSame(XContentParser.Token.FIELD_NAME, parser.nextToken());
assertEquals(testAgg.type.name(), parser.currentName()); assertEquals(testAgg.type.name(), parser.currentName());
assertSame(XContentParser.Token.START_OBJECT, parser.nextToken()); assertSame(XContentParser.Token.START_OBJECT, parser.nextToken());
AggregatorBuilder<?> newAgg = aggParsers.parser(testAgg.getType(), parser).parse(testAgg.name, parser, parseContext); AggregatorBuilder newAgg = aggParsers.parser(testAgg.getType(), parser).parse(testAgg.name, parseContext);
assertSame(XContentParser.Token.END_OBJECT, parser.currentToken()); assertSame(XContentParser.Token.END_OBJECT, parser.currentToken());
assertSame(XContentParser.Token.END_OBJECT, parser.nextToken()); assertSame(XContentParser.Token.END_OBJECT, parser.nextToken());
assertSame(XContentParser.Token.END_OBJECT, parser.nextToken()); assertSame(XContentParser.Token.END_OBJECT, parser.nextToken());

View File

@ -237,8 +237,8 @@ public abstract class BasePipelineAggregationTestCase<AF extends PipelineAggrega
assertSame(XContentParser.Token.FIELD_NAME, parser.nextToken()); assertSame(XContentParser.Token.FIELD_NAME, parser.nextToken());
assertEquals(testAgg.type(), parser.currentName()); assertEquals(testAgg.type(), parser.currentName());
assertSame(XContentParser.Token.START_OBJECT, parser.nextToken()); assertSame(XContentParser.Token.START_OBJECT, parser.nextToken());
PipelineAggregatorBuilder<?> newAgg = aggParsers.pipelineParser(testAgg.getWriteableName(), parser).parse(testAgg.name(), parser, PipelineAggregatorBuilder<?> newAgg = aggParsers.pipelineParser(testAgg.getWriteableName(), parser)
parseContext); .parse(testAgg.name(), parseContext);
assertSame(XContentParser.Token.END_OBJECT, parser.currentToken()); assertSame(XContentParser.Token.END_OBJECT, parser.currentToken());
assertSame(XContentParser.Token.END_OBJECT, parser.nextToken()); assertSame(XContentParser.Token.END_OBJECT, parser.nextToken());
assertSame(XContentParser.Token.END_OBJECT, parser.nextToken()); assertSame(XContentParser.Token.END_OBJECT, parser.nextToken());

View File

@ -37,7 +37,7 @@ public class GeoHashGridParserTests extends ESTestCase {
assertSame(XContentParser.Token.START_OBJECT, token); assertSame(XContentParser.Token.START_OBJECT, token);
GeoHashGridParser parser = new GeoHashGridParser(); GeoHashGridParser parser = new GeoHashGridParser();
// can create a factory // can create a factory
assertNotNull(parser.parse("geohash_grid", stParser, parseContext)); assertNotNull(parser.parse("geohash_grid", parseContext));
} }
public void testParseValidFromStrings() throws Exception { public void testParseValidFromStrings() throws Exception {
@ -51,7 +51,7 @@ public class GeoHashGridParserTests extends ESTestCase {
assertSame(XContentParser.Token.START_OBJECT, token); assertSame(XContentParser.Token.START_OBJECT, token);
GeoHashGridParser parser = new GeoHashGridParser(); GeoHashGridParser parser = new GeoHashGridParser();
// can create a factory // can create a factory
assertNotNull(parser.parse("geohash_grid", stParser, parseContext)); assertNotNull(parser.parse("geohash_grid", parseContext));
} }
public void testParseErrorOnNonIntPrecision() throws Exception { public void testParseErrorOnNonIntPrecision() throws Exception {
@ -63,7 +63,7 @@ public class GeoHashGridParserTests extends ESTestCase {
assertSame(XContentParser.Token.START_OBJECT, token); assertSame(XContentParser.Token.START_OBJECT, token);
GeoHashGridParser parser = new GeoHashGridParser(); GeoHashGridParser parser = new GeoHashGridParser();
try { try {
parser.parse("geohash_grid", stParser, parseContext); parser.parse("geohash_grid", parseContext);
fail(); fail();
} catch (NumberFormatException ex) { } catch (NumberFormatException ex) {
assertEquals("For input string: \"2.0\"", ex.getMessage()); assertEquals("For input string: \"2.0\"", ex.getMessage());
@ -79,7 +79,7 @@ public class GeoHashGridParserTests extends ESTestCase {
assertSame(XContentParser.Token.START_OBJECT, token); assertSame(XContentParser.Token.START_OBJECT, token);
GeoHashGridParser parser = new GeoHashGridParser(); GeoHashGridParser parser = new GeoHashGridParser();
try { try {
parser.parse("geohash_grid", stParser, parseContext); parser.parse("geohash_grid", parseContext);
fail(); fail();
} catch (ParsingException ex) { } catch (ParsingException ex) {
assertEquals("Unexpected token VALUE_BOOLEAN [precision] in [geohash_grid].", ex.getMessage()); assertEquals("Unexpected token VALUE_BOOLEAN [precision] in [geohash_grid].", ex.getMessage());
@ -95,7 +95,7 @@ public class GeoHashGridParserTests extends ESTestCase {
assertSame(XContentParser.Token.START_OBJECT, token); assertSame(XContentParser.Token.START_OBJECT, token);
GeoHashGridParser parser = new GeoHashGridParser(); GeoHashGridParser parser = new GeoHashGridParser();
try { try {
parser.parse("geohash_grid", stParser, parseContext); parser.parse("geohash_grid", parseContext);
fail(); fail();
} catch (IllegalArgumentException ex) { } catch (IllegalArgumentException ex) {
assertEquals("Invalid geohash aggregation precision of 13. Must be between 1 and 12.", ex.getMessage()); assertEquals("Invalid geohash aggregation precision of 13. Must be between 1 and 12.", ex.getMessage());

View File

@ -253,7 +253,7 @@ public class SignificanceHeuristicTests extends ESTestCase {
parseContext.reset(stParser); parseContext.reset(stParser);
parseContext.parseFieldMatcher(ParseFieldMatcher.STRICT); parseContext.parseFieldMatcher(ParseFieldMatcher.STRICT);
stParser.nextToken(); stParser.nextToken();
new SignificantTermsParser(heuristicParserMapper, registry).parse("testagg", stParser, parseContext); new SignificantTermsParser(heuristicParserMapper, registry).parse("testagg", parseContext);
fail(); fail();
} catch (ElasticsearchParseException e) { } catch (ElasticsearchParseException e) {
assertTrue(e.getMessage().contains(expectedError)); assertTrue(e.getMessage().contains(expectedError));
@ -277,7 +277,7 @@ public class SignificanceHeuristicTests extends ESTestCase {
parseContext.parseFieldMatcher(ParseFieldMatcher.STRICT); parseContext.parseFieldMatcher(ParseFieldMatcher.STRICT);
stParser.nextToken(); stParser.nextToken();
SignificantTermsAggregatorBuilder aggregatorFactory = (SignificantTermsAggregatorBuilder) new SignificantTermsParser( SignificantTermsAggregatorBuilder aggregatorFactory = (SignificantTermsAggregatorBuilder) new SignificantTermsParser(
heuristicParserMapper, registry).parse("testagg", stParser, parseContext); heuristicParserMapper, registry).parse("testagg", parseContext);
stParser.nextToken(); stParser.nextToken();
assertThat(aggregatorFactory.getBucketCountThresholds().getMinDocCount(), equalTo(200L)); assertThat(aggregatorFactory.getBucketCountThresholds().getMinDocCount(), equalTo(200L));
assertThat(stParser.currentToken(), equalTo(null)); assertThat(stParser.currentToken(), equalTo(null));

View File

@ -177,7 +177,7 @@ public class TopHitsTests extends BaseAggregationTestCase<TopHitsAggregatorBuild
parseContext.reset(parser); parseContext.reset(parser);
parseContext.parseFieldMatcher(parseFieldMatcher); parseContext.parseFieldMatcher(parseFieldMatcher);
assertSame(XContentParser.Token.START_OBJECT, parser.nextToken()); assertSame(XContentParser.Token.START_OBJECT, parser.nextToken());
aggParsers.parseAggregators(parser, parseContext); aggParsers.parseAggregators(parseContext);
fail(); fail();
} catch (AggregationInitializationException e) { } catch (AggregationInitializationException e) {
assertThat(e.toString(), containsString("Aggregator [top_tags_hits] of type [top_hits] cannot accept sub-aggregations")); assertThat(e.toString(), containsString("Aggregator [top_tags_hits] of type [top_hits] cannot accept sub-aggregations"));

View File

@ -23,8 +23,8 @@ import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.search.aggregations.BasePipelineAggregationTestCase; import org.elasticsearch.search.aggregations.BasePipelineAggregationTestCase;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilder;
import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilder;
import org.elasticsearch.search.aggregations.pipeline.movavg.MovAvgPipelineAggregatorBuilder; import org.elasticsearch.search.aggregations.pipeline.movavg.MovAvgPipelineAggregatorBuilder;
import org.elasticsearch.search.aggregations.pipeline.movavg.models.EwmaModel; import org.elasticsearch.search.aggregations.pipeline.movavg.models.EwmaModel;
import org.elasticsearch.search.aggregations.pipeline.movavg.models.HoltLinearModel; import org.elasticsearch.search.aggregations.pipeline.movavg.models.HoltLinearModel;
@ -116,7 +116,7 @@ public class MovAvgTests extends BasePipelineAggregationTestCase<MovAvgPipelineA
assertSame(XContentParser.Token.FIELD_NAME, parser.nextToken()); assertSame(XContentParser.Token.FIELD_NAME, parser.nextToken());
assertEquals(expected.type(), parser.currentName()); assertEquals(expected.type(), parser.currentName());
assertSame(XContentParser.Token.START_OBJECT, parser.nextToken()); assertSame(XContentParser.Token.START_OBJECT, parser.nextToken());
PipelineAggregatorBuilder<?> newAgg = aggParsers.pipelineParser(expected.getWriteableName(), parser).parse(expected.name(), parser, PipelineAggregatorBuilder<?> newAgg = aggParsers.pipelineParser(expected.getWriteableName(), parser).parse(expected.name(),
parseContext); parseContext);
assertSame(XContentParser.Token.END_OBJECT, parser.currentToken()); assertSame(XContentParser.Token.END_OBJECT, parser.currentToken());
assertSame(XContentParser.Token.END_OBJECT, parser.nextToken()); assertSame(XContentParser.Token.END_OBJECT, parser.nextToken());

View File

@ -454,7 +454,7 @@ public class SearchSourceBuilderTests extends ESTestCase {
if (randomBoolean()) { if (randomBoolean()) {
parser.nextToken(); // sometimes we move it on the START_OBJECT to test the embedded case parser.nextToken(); // sometimes we move it on the START_OBJECT to test the embedded case
} }
SearchSourceBuilder newBuilder = SearchSourceBuilder.fromXContent(parser, parseContext, aggParsers, suggesters); SearchSourceBuilder newBuilder = SearchSourceBuilder.fromXContent(parseContext, aggParsers, suggesters);
assertNull(parser.nextToken()); assertNull(parser.nextToken());
assertEquals(testBuilder, newBuilder); assertEquals(testBuilder, newBuilder);
assertEquals(testBuilder.hashCode(), newBuilder.hashCode()); assertEquals(testBuilder.hashCode(), newBuilder.hashCode());
@ -521,7 +521,7 @@ public class SearchSourceBuilderTests extends ESTestCase {
{ {
String restContent = " { \"_source\": { \"includes\": \"include\", \"excludes\": \"*.field2\"}}"; String restContent = " { \"_source\": { \"includes\": \"include\", \"excludes\": \"*.field2\"}}";
try (XContentParser parser = XContentFactory.xContent(restContent).createParser(restContent)) { try (XContentParser parser = XContentFactory.xContent(restContent).createParser(restContent)) {
SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.fromXContent(parser, createParseContext(parser), SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.fromXContent(createParseContext(parser),
aggParsers, suggesters); aggParsers, suggesters);
assertArrayEquals(new String[]{"*.field2"}, searchSourceBuilder.fetchSource().excludes()); assertArrayEquals(new String[]{"*.field2"}, searchSourceBuilder.fetchSource().excludes());
assertArrayEquals(new String[]{"include"}, searchSourceBuilder.fetchSource().includes()); assertArrayEquals(new String[]{"include"}, searchSourceBuilder.fetchSource().includes());
@ -530,7 +530,7 @@ public class SearchSourceBuilderTests extends ESTestCase {
{ {
String restContent = " { \"_source\": false}"; String restContent = " { \"_source\": false}";
try (XContentParser parser = XContentFactory.xContent(restContent).createParser(restContent)) { try (XContentParser parser = XContentFactory.xContent(restContent).createParser(restContent)) {
SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.fromXContent(parser, createParseContext(parser), SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.fromXContent(createParseContext(parser),
aggParsers, suggesters); aggParsers, suggesters);
assertArrayEquals(new String[]{}, searchSourceBuilder.fetchSource().excludes()); assertArrayEquals(new String[]{}, searchSourceBuilder.fetchSource().excludes());
assertArrayEquals(new String[]{}, searchSourceBuilder.fetchSource().includes()); assertArrayEquals(new String[]{}, searchSourceBuilder.fetchSource().includes());
@ -543,7 +543,7 @@ public class SearchSourceBuilderTests extends ESTestCase {
{ {
String restContent = " { \"sort\": \"foo\"}"; String restContent = " { \"sort\": \"foo\"}";
try (XContentParser parser = XContentFactory.xContent(restContent).createParser(restContent)) { try (XContentParser parser = XContentFactory.xContent(restContent).createParser(restContent)) {
SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.fromXContent(parser, createParseContext(parser), SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.fromXContent(createParseContext(parser),
aggParsers, suggesters); aggParsers, suggesters);
assertEquals(1, searchSourceBuilder.sorts().size()); assertEquals(1, searchSourceBuilder.sorts().size());
assertEquals(new FieldSortBuilder("foo"), searchSourceBuilder.sorts().get(0)); assertEquals(new FieldSortBuilder("foo"), searchSourceBuilder.sorts().get(0));
@ -559,7 +559,7 @@ public class SearchSourceBuilderTests extends ESTestCase {
" \"_score\"\n" + " \"_score\"\n" +
" ]}"; " ]}";
try (XContentParser parser = XContentFactory.xContent(restContent).createParser(restContent)) { try (XContentParser parser = XContentFactory.xContent(restContent).createParser(restContent)) {
SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.fromXContent(parser, createParseContext(parser), SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.fromXContent(createParseContext(parser),
aggParsers, suggesters); aggParsers, suggesters);
assertEquals(5, searchSourceBuilder.sorts().size()); assertEquals(5, searchSourceBuilder.sorts().size());
assertEquals(new FieldSortBuilder("post_date"), searchSourceBuilder.sorts().get(0)); assertEquals(new FieldSortBuilder("post_date"), searchSourceBuilder.sorts().get(0));

View File

@ -19,6 +19,14 @@
package org.elasticsearch.index.reindex; package org.elasticsearch.index.reindex;
import static org.elasticsearch.common.unit.TimeValue.parseTimeValue;
import static org.elasticsearch.rest.RestRequest.Method.POST;
import static org.elasticsearch.rest.RestStatus.BAD_REQUEST;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import org.elasticsearch.action.WriteConsistencyLevel; import org.elasticsearch.action.WriteConsistencyLevel;
import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchRequest;
@ -45,14 +53,6 @@ import org.elasticsearch.script.Script;
import org.elasticsearch.search.aggregations.AggregatorParsers; import org.elasticsearch.search.aggregations.AggregatorParsers;
import org.elasticsearch.search.suggest.Suggesters; import org.elasticsearch.search.suggest.Suggesters;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import static org.elasticsearch.common.unit.TimeValue.parseTimeValue;
import static org.elasticsearch.rest.RestRequest.Method.POST;
import static org.elasticsearch.rest.RestStatus.BAD_REQUEST;
/** /**
* Expose IndexBySearchRequest over rest. * Expose IndexBySearchRequest over rest.
*/ */
@ -77,7 +77,7 @@ public class RestReindexAction extends AbstractBaseReindexRestHandler<ReindexReq
builder.map(source); builder.map(source);
parser = parser.contentType().xContent().createParser(builder.bytes()); parser = parser.contentType().xContent().createParser(builder.bytes());
context.queryParseContext.reset(parser); context.queryParseContext.reset(parser);
search.source().parseXContent(parser, context.queryParseContext, context.aggParsers, context.suggesters); search.source().parseXContent(context.queryParseContext, context.aggParsers, context.suggesters);
}; };
ObjectParser<IndexRequest, Void> destParser = new ObjectParser<>("dest"); ObjectParser<IndexRequest, Void> destParser = new ObjectParser<>("dest");