Merge branch 'master' into enhancement/use_shard_bulk_for_single_ops

Lee Hinman 2017-01-09 14:22:00 -07:00
commit 7da6e0fb0f
29 changed files with 526 additions and 652 deletions

View File

@@ -90,8 +90,7 @@ public class RestMultiSearchAction extends BaseRestHandler {
parseMultiLineRequest(restRequest, multiRequest.indicesOptions(), allowExplicitIndex, (searchRequest, parser) -> {
try {
final QueryParseContext queryParseContext = new QueryParseContext(parser, parseFieldMatcher);
searchRequest.source(SearchSourceBuilder.fromXContent(queryParseContext,
searchRequestParsers.aggParsers, searchRequestParsers.suggesters));
searchRequest.source(SearchSourceBuilder.fromXContent(queryParseContext, searchRequestParsers.suggesters));
multiRequest.add(searchRequest);
} catch (IOException e) {
throw new ElasticsearchParseException("Exception when parsing search request", e);

View File

@@ -93,7 +93,7 @@ public class RestSearchAction extends BaseRestHandler {
searchRequest.indices(Strings.splitStringByCommaToArray(request.param("index")));
if (requestContentParser != null) {
QueryParseContext context = new QueryParseContext(requestContentParser, parseFieldMatcher);
searchRequest.source().parseXContent(context, searchRequestParsers.aggParsers, searchRequestParsers.suggesters);
searchRequest.source().parseXContent(context, searchRequestParsers.suggesters);
}
// do not allow 'query_and_fetch' or 'dfs_query_and_fetch' search types

View File

@@ -94,8 +94,8 @@ import org.elasticsearch.plugins.SearchPlugin.ScoreFunctionSpec;
import org.elasticsearch.plugins.SearchPlugin.SearchExtSpec;
import org.elasticsearch.plugins.SearchPlugin.SearchExtensionSpec;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorParsers;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.BaseAggregationBuilder;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.PipelineAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.children.ChildrenAggregationBuilder;
@@ -268,10 +268,6 @@ public class SearchModule {
private final boolean transportClient;
private final Map<String, Highlighter> highlighters;
private final Map<String, Suggester<?>> suggesters;
private final ParseFieldRegistry<Aggregator.Parser> aggregationParserRegistry = new ParseFieldRegistry<>("aggregation");
private final ParseFieldRegistry<PipelineAggregator.Parser> pipelineAggregationParserRegistry = new ParseFieldRegistry<>(
"pipline_aggregation");
private final AggregatorParsers aggregatorParsers = new AggregatorParsers(aggregationParserRegistry, pipelineAggregationParserRegistry);
private final ParseFieldRegistry<SignificanceHeuristicParser> significanceHeuristicParserRegistry = new ParseFieldRegistry<>(
"significance_heuristic");
private final ParseFieldRegistry<MovAvgModel.AbstractModelParser> movingAverageModelParserRegistry = new ParseFieldRegistry<>(
@@ -301,7 +297,7 @@ public class SearchModule {
registerFetchSubPhases(plugins);
registerSearchExts(plugins);
registerShapes();
searchRequestParsers = new SearchRequestParsers(aggregatorParsers, getSuggesters());
searchRequestParsers = new SearchRequestParsers(getSuggesters());
}
public List<NamedWriteableRegistry.Entry> getNamedWriteables() {
@@ -341,13 +337,6 @@ public class SearchModule {
return movingAverageModelParserRegistry;
}
/**
* Parsers for {@link AggregationBuilder}s and {@link PipelineAggregationBuilder}s.
*/
public AggregatorParsers getAggregatorParsers() {
return aggregatorParsers;
}
private void registerAggregations(List<SearchPlugin> plugins) {
registerAggregation(new AggregationSpec(AvgAggregationBuilder.NAME, AvgAggregationBuilder::new, AvgAggregationBuilder::parse)
.addResultReader(InternalAvg::new));
@@ -433,7 +422,10 @@ public class SearchModule {
private void registerAggregation(AggregationSpec spec) {
if (false == transportClient) {
aggregationParserRegistry.register(spec.getParser(), spec.getName());
namedXContents.add(new NamedXContentRegistry.Entry(BaseAggregationBuilder.class, spec.getName(), (p, c) -> {
AggregatorFactories.AggParseContext context = (AggregatorFactories.AggParseContext) c;
return spec.getParser().parse(context.name, context.queryParseContext);
}));
}
namedWriteables.add(
new NamedWriteableRegistry.Entry(AggregationBuilder.class, spec.getName().getPreferredName(), spec.getReader()));
@@ -527,7 +519,10 @@ public class SearchModule {
private void registerPipelineAggregation(PipelineAggregationSpec spec) {
if (false == transportClient) {
pipelineAggregationParserRegistry.register(spec.getParser(), spec.getName());
namedXContents.add(new NamedXContentRegistry.Entry(BaseAggregationBuilder.class, spec.getName(), (p, c) -> {
AggregatorFactories.AggParseContext context = (AggregatorFactories.AggParseContext) c;
return spec.getParser().parse(context.name, context.queryParseContext);
}));
}
namedWriteables.add(
new NamedWriteableRegistry.Entry(PipelineAggregationBuilder.class, spec.getName().getPreferredName(), spec.getReader()));
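
For orientation, the consuming side of these NamedXContentRegistry entries lives in AggregatorFactories.parseAggregators (further below): the type name found in the request is resolved against the shared BaseAggregationBuilder namespace. A minimal sketch, using the same names as that parse loop:

    // Resolve the aggregation type against the entries registered above;
    // aggregationName, fieldName and parseContext come from the enclosing parse loop.
    BaseAggregationBuilder aggBuilder = parser.namedObject(BaseAggregationBuilder.class, fieldName,
            new AggregatorFactories.AggParseContext(aggregationName, parseContext));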

View File

@@ -19,8 +19,6 @@
package org.elasticsearch.search;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.search.aggregations.AggregatorParsers;
import org.elasticsearch.search.suggest.Suggesters;
/**
@@ -32,25 +30,14 @@ public class SearchRequestParsers {
// methods split across RestSearchAction and SearchSourceBuilder should be moved here
// TODO: make all members private once parsing functions are moved here
// TODO: AggregatorParsers should be removed and the underlying maps of agg
// and pipeline agg parsers should be here
/**
* Agg and pipeline agg parsers that may be used in search requests.
* @see org.elasticsearch.search.builder.SearchSourceBuilder#fromXContent(QueryParseContext, AggregatorParsers,
* Suggesters)
*/
public final AggregatorParsers aggParsers;
// TODO: Suggesters should be removed and the underlying map moved here
/**
* Suggesters that may be used in search requests.
* @see org.elasticsearch.search.builder.SearchSourceBuilder#fromXContent(QueryParseContext, AggregatorParsers,
* Suggesters)
* @see org.elasticsearch.search.builder.SearchSourceBuilder#fromXContent(QueryParseContext, Suggesters)
*/
public final Suggesters suggesters;
public SearchRequestParsers(AggregatorParsers aggParsers, Suggesters suggesters) {
this.aggParsers = aggParsers;
public SearchRequestParsers(Suggesters suggesters) {
this.suggesters = suggesters;
}
}
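
A minimal construction sketch under the slimmed-down signature (the MultiSearchRequestTests change further below passes null in the same slot):

    // Hedged sketch: the holder now only carries suggesters; `suggesters` is
    // whatever Suggesters instance SearchModule built (see getSuggesters() above).
    SearchRequestParsers parsers = new SearchRequestParsers(suggesters);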

View File

@@ -117,6 +117,7 @@ public abstract class AbstractAggregationBuilder<AB extends AbstractAggregationB
return (AB) this;
}
@Override
public String getType() {
return type.name();
}

View File

@@ -23,6 +23,7 @@ import org.elasticsearch.action.support.ToXContentToBytes;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.NamedWriteable;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.search.aggregations.InternalAggregation.Type;
import org.elasticsearch.search.internal.SearchContext;
@@ -34,7 +35,7 @@ import java.util.Map;
*/
public abstract class AggregationBuilder
extends ToXContentToBytes
implements NamedWriteable, ToXContent {
implements NamedWriteable, ToXContent, BaseAggregationBuilder {
protected final String name;
protected final Type type;
@@ -66,6 +67,7 @@ public abstract class AggregationBuilder
protected abstract AggregatorFactory<?> build(SearchContext context, AggregatorFactory<?> parent) throws IOException;
/** Associate metadata with this {@link AggregationBuilder}. */
@Override
public abstract AggregationBuilder setMetaData(Map<String, Object> metaData);
/** Add a sub aggregation to this builder. */
@@ -77,13 +79,14 @@ public abstract class AggregationBuilder
/**
* Internal: Registers sub-factories with this factory. The sub-factory will be
* responsible for the creation of sub-aggregators under the aggregator
* created by this factory. This is only for use by {@link AggregatorParsers}.
* created by this factory. This is only for use by {@link AggregatorFactories#parseAggregators(QueryParseContext)}.
*
* @param subFactories
* The sub-factories
* @return this factory (fluent interface)
*/
protected abstract AggregationBuilder subAggregations(AggregatorFactories.Builder subFactories);
@Override
public abstract AggregationBuilder subAggregations(AggregatorFactories.Builder subFactories);
/** Common xcontent fields shared among aggregator builders */
public static final class CommonFields extends ParseField.CommonFields {

View File

@@ -19,10 +19,13 @@
package org.elasticsearch.search.aggregations;
import org.elasticsearch.action.support.ToXContentToBytes;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.search.aggregations.support.AggregationPath;
import org.elasticsearch.search.aggregations.support.AggregationPath.PathElement;
@@ -40,8 +43,126 @@ import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public class AggregatorFactories {
public static final Pattern VALID_AGG_NAME = Pattern.compile("[^\\[\\]>]+");
/**
* Parses the aggregation request recursively generating aggregator factories in turn.
*
* @param parseContext The parse context.
*
* @return The parsed aggregator factories.
*
* @throws IOException When parsing fails for unknown reasons.
*/
public static AggregatorFactories.Builder parseAggregators(QueryParseContext parseContext) throws IOException {
return parseAggregators(parseContext, 0);
}
private static AggregatorFactories.Builder parseAggregators(QueryParseContext parseContext, int level) throws IOException {
Matcher validAggMatcher = VALID_AGG_NAME.matcher("");
AggregatorFactories.Builder factories = new AggregatorFactories.Builder();
XContentParser.Token token = null;
XContentParser parser = parseContext.parser();
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token != XContentParser.Token.FIELD_NAME) {
throw new ParsingException(parser.getTokenLocation(),
"Unexpected token " + token + " in [aggs]: aggregations definitions must start with the name of the aggregation.");
}
final String aggregationName = parser.currentName();
if (!validAggMatcher.reset(aggregationName).matches()) {
throw new ParsingException(parser.getTokenLocation(), "Invalid aggregation name [" + aggregationName
+ "]. Aggregation names must be alpha-numeric and can only contain '_' and '-'");
}
token = parser.nextToken();
if (token != XContentParser.Token.START_OBJECT) {
throw new ParsingException(parser.getTokenLocation(), "Aggregation definition for [" + aggregationName + " starts with a ["
+ token + "], expected a [" + XContentParser.Token.START_OBJECT + "].");
}
BaseAggregationBuilder aggBuilder = null;
AggregatorFactories.Builder subFactories = null;
Map<String, Object> metaData = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token != XContentParser.Token.FIELD_NAME) {
throw new ParsingException(
parser.getTokenLocation(), "Expected [" + XContentParser.Token.FIELD_NAME + "] under a ["
+ XContentParser.Token.START_OBJECT + "], but got a [" + token + "] in [" + aggregationName + "]",
parser.getTokenLocation());
}
final String fieldName = parser.currentName();
token = parser.nextToken();
if (token == XContentParser.Token.START_OBJECT) {
switch (fieldName) {
case "meta":
metaData = parser.map();
break;
case "aggregations":
case "aggs":
if (subFactories != null) {
throw new ParsingException(parser.getTokenLocation(),
"Found two sub aggregation definitions under [" + aggregationName + "]");
}
subFactories = parseAggregators(parseContext, level + 1);
break;
default:
if (aggBuilder != null) {
throw new ParsingException(parser.getTokenLocation(), "Found two aggregation type definitions in ["
+ aggregationName + "]: [" + aggBuilder.getType() + "] and [" + fieldName + "]");
}
aggBuilder = parser.namedObject(BaseAggregationBuilder.class, fieldName,
new AggParseContext(aggregationName, parseContext));
}
} else {
throw new ParsingException(parser.getTokenLocation(), "Expected [" + XContentParser.Token.START_OBJECT + "] under ["
+ fieldName + "], but got a [" + token + "] in [" + aggregationName + "]");
}
}
if (aggBuilder == null) {
throw new ParsingException(parser.getTokenLocation(), "Missing definition for aggregation [" + aggregationName + "]",
parser.getTokenLocation());
} else {
if (metaData != null) {
aggBuilder.setMetaData(metaData);
}
if (subFactories != null) {
aggBuilder.subAggregations(subFactories);
}
if (aggBuilder instanceof AggregationBuilder) {
factories.addAggregator((AggregationBuilder) aggBuilder);
} else {
factories.addPipelineAggregator((PipelineAggregationBuilder) aggBuilder);
}
}
}
return factories;
}
/**
* Context to parse an aggregation. This should eventually be removed and replaced with a String.
*/
public static final class AggParseContext {
public final String name;
public final QueryParseContext queryParseContext;
public AggParseContext(String name, QueryParseContext queryParseContext) {
this.name = name;
this.queryParseContext = queryParseContext;
}
}
public static final AggregatorFactories EMPTY = new AggregatorFactories(null, new AggregatorFactory<?>[0],
new ArrayList<PipelineAggregationBuilder>());
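
Usage mirrors the tests near the end of this commit: position the parser on the opening START_OBJECT of the aggregations body, wrap it in a QueryParseContext, and call the now-static entry point. A minimal sketch, assuming the parser was created against a NamedXContentRegistry holding the BaseAggregationBuilder entries from SearchModule:

    XContentParser parser = createParser(source); // test helper, as in AggregatorFactoriesTests below
    QueryParseContext parseContext = new QueryParseContext(parser, ParseFieldMatcher.STRICT);
    parser.nextToken(); // advance onto START_OBJECT
    AggregatorFactories.Builder factories = AggregatorFactories.parseAggregators(parseContext);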

View File

@@ -1,192 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.xcontent.ParseFieldRegistry;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import java.io.IOException;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* A registry for all the aggregator parsers; it also serves as the main parser for the aggregations module
*/
public class AggregatorParsers {
public static final Pattern VALID_AGG_NAME = Pattern.compile("[^\\[\\]>]+");
private final ParseFieldRegistry<Aggregator.Parser> aggregationParserRegistry;
private final ParseFieldRegistry<PipelineAggregator.Parser> pipelineAggregationParserRegistry;
public AggregatorParsers(ParseFieldRegistry<Aggregator.Parser> aggregationParserRegistry,
ParseFieldRegistry<PipelineAggregator.Parser> pipelineAggregationParserRegistry) {
this.aggregationParserRegistry = aggregationParserRegistry;
this.pipelineAggregationParserRegistry = pipelineAggregationParserRegistry;
}
/**
* Returns the parser that is registered under the given aggregation type.
*
* @param type The aggregation type
* @return The parser associated with the given aggregation type or null if it wasn't found.
*/
public Aggregator.Parser parser(String type) {
return aggregationParserRegistry.lookupReturningNullIfNotFound(type);
}
/**
* Returns the parser that is registered under the given pipeline aggregator type.
*
* @param type The pipeline aggregator type
* @return The parser associated with the given pipeline aggregator type or null if it wasn't found.
*/
public PipelineAggregator.Parser pipelineParser(String type) {
return pipelineAggregationParserRegistry.lookupReturningNullIfNotFound(type);
}
/**
* Parses the aggregation request recursively generating aggregator factories in turn.
*
* @param parseContext The parse context.
*
* @return The parsed aggregator factories.
*
* @throws IOException When parsing fails for unknown reasons.
*/
public AggregatorFactories.Builder parseAggregators(QueryParseContext parseContext) throws IOException {
return parseAggregators(parseContext, 0);
}
private AggregatorFactories.Builder parseAggregators(QueryParseContext parseContext, int level) throws IOException {
Matcher validAggMatcher = VALID_AGG_NAME.matcher("");
AggregatorFactories.Builder factories = new AggregatorFactories.Builder();
XContentParser.Token token = null;
XContentParser parser = parseContext.parser();
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token != XContentParser.Token.FIELD_NAME) {
throw new ParsingException(parser.getTokenLocation(),
"Unexpected token " + token + " in [aggs]: aggregations definitions must start with the name of the aggregation.");
}
final String aggregationName = parser.currentName();
if (!validAggMatcher.reset(aggregationName).matches()) {
throw new ParsingException(parser.getTokenLocation(), "Invalid aggregation name [" + aggregationName
+ "]. Aggregation names must be alpha-numeric and can only contain '_' and '-'");
}
token = parser.nextToken();
if (token != XContentParser.Token.START_OBJECT) {
throw new ParsingException(parser.getTokenLocation(), "Aggregation definition for [" + aggregationName + " starts with a ["
+ token + "], expected a [" + XContentParser.Token.START_OBJECT + "].");
}
AggregationBuilder aggFactory = null;
PipelineAggregationBuilder pipelineAggregatorFactory = null;
AggregatorFactories.Builder subFactories = null;
Map<String, Object> metaData = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token != XContentParser.Token.FIELD_NAME) {
throw new ParsingException(
parser.getTokenLocation(), "Expected [" + XContentParser.Token.FIELD_NAME + "] under a ["
+ XContentParser.Token.START_OBJECT + "], but got a [" + token + "] in [" + aggregationName + "]",
parser.getTokenLocation());
}
final String fieldName = parser.currentName();
token = parser.nextToken();
if (token == XContentParser.Token.START_OBJECT) {
switch (fieldName) {
case "meta":
metaData = parser.map();
break;
case "aggregations":
case "aggs":
if (subFactories != null) {
throw new ParsingException(parser.getTokenLocation(),
"Found two sub aggregation definitions under [" + aggregationName + "]");
}
subFactories = parseAggregators(parseContext, level + 1);
break;
default:
if (aggFactory != null) {
throw new ParsingException(parser.getTokenLocation(), "Found two aggregation type definitions in ["
+ aggregationName + "]: [" + aggFactory.type + "] and [" + fieldName + "]");
}
if (pipelineAggregatorFactory != null) {
throw new ParsingException(parser.getTokenLocation(), "Found two aggregation type definitions in ["
+ aggregationName + "]: [" + pipelineAggregatorFactory + "] and [" + fieldName + "]");
}
Aggregator.Parser aggregatorParser = parser(fieldName);
if (aggregatorParser == null) {
PipelineAggregator.Parser pipelineAggregatorParser = pipelineParser(fieldName);
if (pipelineAggregatorParser == null) {
throw new ParsingException(parser.getTokenLocation(),
"Could not find aggregator type [" + fieldName + "] in [" + aggregationName + "]");
} else {
pipelineAggregatorFactory = pipelineAggregatorParser.parse(aggregationName, parseContext);
}
} else {
aggFactory = aggregatorParser.parse(aggregationName, parseContext);
}
}
} else {
throw new ParsingException(parser.getTokenLocation(), "Expected [" + XContentParser.Token.START_OBJECT + "] under ["
+ fieldName + "], but got a [" + token + "] in [" + aggregationName + "]");
}
}
if (aggFactory == null && pipelineAggregatorFactory == null) {
throw new ParsingException(parser.getTokenLocation(), "Missing definition for aggregation [" + aggregationName + "]",
parser.getTokenLocation());
} else if (aggFactory != null) {
assert pipelineAggregatorFactory == null;
if (metaData != null) {
aggFactory.setMetaData(metaData);
}
if (subFactories != null) {
aggFactory.subAggregations(subFactories);
}
factories.addAggregator(aggFactory);
} else {
assert pipelineAggregatorFactory != null;
if (subFactories != null) {
throw new ParsingException(parser.getTokenLocation(),
"Aggregation [" + aggregationName + "] cannot define sub-aggregations",
parser.getTokenLocation());
}
if (metaData != null) {
pipelineAggregatorFactory.setMetaData(metaData);
}
factories.addPipelineAggregator(pipelineAggregatorFactory);
}
}
return factories;
}
}

View File

@@ -0,0 +1,46 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
import java.util.Map;
/**
* Interface shared by {@link AggregationBuilder} and {@link PipelineAggregationBuilder} so they can conveniently share the same namespace
* for {@link XContentParser#namedObject(Class, String, Object)}.
*/
public interface BaseAggregationBuilder {
/**
* The name of the type of aggregation built by this builder.
*/
String getType();
/**
* Set the aggregation's metadata. Returns {@code this} for chaining.
*/
BaseAggregationBuilder setMetaData(Map<String, Object> metaData);
/**
* Set the sub aggregations if this aggregation supports sub aggregations. Returns {@code this} for chaining.
*/
BaseAggregationBuilder subAggregations(Builder subFactories);
}
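
To make the shared namespace concrete, here is a hedged sketch of a registry entry for a hypothetical "my_agg" builder; SearchModule registers the real entries this way (see registerAggregation above), and MyAggregationBuilder.parse is assumed to follow the usual (String name, QueryParseContext) parser shape:

    namedXContents.add(new NamedXContentRegistry.Entry(BaseAggregationBuilder.class,
            new ParseField("my_agg"), (p, c) -> {
                // The context carries the aggregation name and QueryParseContext through namedObject.
                AggregatorFactories.AggParseContext context = (AggregatorFactories.AggParseContext) c;
                return MyAggregationBuilder.parse(context.name, context.queryParseContext);
            }));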

View File

@@ -20,7 +20,7 @@ package org.elasticsearch.search.aggregations;
import org.elasticsearch.action.support.ToXContentToBytes;
import org.elasticsearch.common.io.stream.NamedWriteable;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import java.io.IOException;
@@ -32,7 +32,7 @@ import java.util.Map;
* specific type.
*/
public abstract class PipelineAggregationBuilder extends ToXContentToBytes
implements NamedWriteable {
implements NamedWriteable, BaseAggregationBuilder {
protected final String name;
protected final String[] bucketsPaths;
@@ -79,6 +79,11 @@ public abstract class PipelineAggregationBuilder extends ToXContentToBytes
protected abstract PipelineAggregator create() throws IOException;
/** Associate metadata with this {@link PipelineAggregationBuilder}. */
@Override
public abstract PipelineAggregationBuilder setMetaData(Map<String, Object> metaData);
@Override
public PipelineAggregationBuilder subAggregations(Builder subFactories) {
throw new IllegalArgumentException("Aggregation [" + name + "] cannot define sub-aggregations");
}
}
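
The practical effect: nesting sub-aggregations under a pipeline aggregation now fails in the builder itself instead of in a dedicated parser branch (compare the removed AggregatorParsers check further below). A hedged sketch using a builder from the existing tests:

    PipelineAggregationBuilder builder = PipelineAggregatorBuilders.avgBucket("bar", "path");
    builder.subAggregations(new AggregatorFactories.Builder()); // throws IllegalArgumentException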

View File

@@ -171,4 +171,8 @@ public abstract class AbstractPipelineAggregationBuilder<PAB extends AbstractPip
protected abstract boolean doEquals(Object obj);
@Override
public String getType() {
return type;
}
}

View File

@@ -40,7 +40,6 @@ import org.elasticsearch.script.Script;
import org.elasticsearch.search.SearchExtBuilder;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.AggregatorParsers;
import org.elasticsearch.search.aggregations.PipelineAggregationBuilder;
import org.elasticsearch.search.fetch.StoredFieldsContext;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
@@ -104,10 +103,9 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
public static final ParseField SLICE = new ParseField("slice");
public static final ParseField ALL_FIELDS_FIELDS = new ParseField("all_fields");
public static SearchSourceBuilder fromXContent(QueryParseContext context, AggregatorParsers aggParsers,
Suggesters suggesters) throws IOException {
public static SearchSourceBuilder fromXContent(QueryParseContext context, Suggesters suggesters) throws IOException {
SearchSourceBuilder builder = new SearchSourceBuilder();
builder.parseXContent(context, aggParsers, suggesters);
builder.parseXContent(context, suggesters);
return builder;
}
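
Call sites shrink accordingly; a minimal sketch matching the RestMultiSearchAction change at the top of this commit:

    QueryParseContext context = new QueryParseContext(parser, parseFieldMatcher);
    searchRequest.source(SearchSourceBuilder.fromXContent(context, searchRequestParsers.suggesters));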
@@ -912,12 +910,9 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
/**
* Parse some xContent into this SearchSourceBuilder, overwriting any values specified in the xContent. Use this if you need to set up
* different defaults than a regular SearchSourceBuilder would have and use
* {@link #fromXContent(QueryParseContext, AggregatorParsers, Suggesters)} if you have normal defaults.
* {@link #fromXContent(QueryParseContext, Suggesters)} if you have normal defaults.
*/
public void parseXContent(QueryParseContext context, AggregatorParsers aggParsers,
Suggesters suggesters)
throws IOException {
public void parseXContent(QueryParseContext context, Suggesters suggesters) throws IOException {
XContentParser parser = context.parser();
XContentParser.Token token = parser.currentToken();
String currentFieldName = null;
@@ -989,7 +984,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
}
} else if (AGGREGATIONS_FIELD.match(currentFieldName)
|| AGGS_FIELD.match(currentFieldName)) {
aggregations = aggParsers.parseAggregators(context);
aggregations = AggregatorFactories.parseAggregators(context);
} else if (HIGHLIGHT_FIELD.match(currentFieldName)) {
highlightBuilder = HighlightBuilder.fromXContent(context);
} else if (SUGGEST_FIELD.match(currentFieldName)) {

View File

@@ -163,7 +163,7 @@ public class MultiSearchRequestTests extends ESTestCase {
private MultiSearchRequest parseMultiSearchRequest(String sample) throws IOException {
byte[] data = StreamsUtils.copyToBytesFromClasspath(sample);
RestRequest restRequest = new FakeRestRequest.Builder(xContentRegistry()).withContent(new BytesArray(data)).build();
return RestMultiSearchAction.parseRequest(restRequest, true, new SearchRequestParsers(null, null), ParseFieldMatcher.EMPTY);
return RestMultiSearchAction.parseRequest(restRequest, true, new SearchRequestParsers(null), ParseFieldMatcher.EMPTY);
}
@Override

View File

@@ -32,6 +32,7 @@ import org.elasticsearch.plugins.SearchPlugin;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.BaseAggregationBuilder;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.InternalAggregation.ReduceContext;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.ChiSquare;
@@ -74,8 +75,10 @@ import java.util.Set;
import static java.util.Collections.emptyList;
import static java.util.Collections.singletonList;
import static java.util.Collections.singletonMap;
import static java.util.stream.Collectors.toList;
import static java.util.stream.Collectors.toSet;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.hasSize;
public class SearchModuleTests extends ModuleTestCase {
@@ -149,8 +152,8 @@ public class SearchModuleTests extends ModuleTestCase {
TermsAggregationBuilder::parse));
}
};
expectThrows(IllegalArgumentException.class, () -> new SearchModule(Settings.EMPTY, false,
singletonList(registersDupeAggregation)));
expectThrows(IllegalArgumentException.class, () -> new NamedXContentRegistry(new SearchModule(Settings.EMPTY, false,
singletonList(registersDupeAggregation)).getNamedXContents()));
SearchPlugin registersDupePipelineAggregation = new SearchPlugin() {
public List<PipelineAggregationSpec> getPipelineAggregations() {
@@ -162,8 +165,8 @@ public class SearchModuleTests extends ModuleTestCase {
.addResultReader(InternalDerivative::new));
}
};
expectThrows(IllegalArgumentException.class, () -> new SearchModule(Settings.EMPTY, false,
singletonList(registersDupePipelineAggregation)));
expectThrows(IllegalArgumentException.class, () -> new NamedXContentRegistry(new SearchModule(Settings.EMPTY, false,
singletonList(registersDupePipelineAggregation)).getNamedXContents()));
}
public void testRegisterSuggester() {
@@ -221,7 +224,11 @@ public class SearchModuleTests extends ModuleTestCase {
}
}));
assertNotNull(module.getAggregatorParsers().parser("test"));
assertThat(
module.getNamedXContents().stream()
.filter(entry -> entry.categoryClass.equals(BaseAggregationBuilder.class) && entry.name.match("test"))
.collect(toList()),
hasSize(1));
}
public void testRegisterPipelineAggregation() {
@@ -232,7 +239,11 @@ public class SearchModuleTests extends ModuleTestCase {
}
}));
assertNotNull(module.getAggregatorParsers().pipelineParser("test"));
assertThat(
module.getNamedXContents().stream()
.filter(entry -> entry.categoryClass.equals(BaseAggregationBuilder.class) && entry.name.match("test"))
.collect(toList()),
hasSize(1));
}
private static final String[] NON_DEPRECATED_QUERIES = new String[] {

View File

@@ -24,7 +24,6 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.search.SearchRequestParsers;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.test.ESSingleNodeTestCase;
@@ -59,12 +58,11 @@ public class AggregationCollectorTests extends ESSingleNodeTestCase {
}
private boolean needsScores(IndexService index, String agg) throws IOException {
AggregatorParsers parser = getInstanceFromNode(SearchRequestParsers.class).aggParsers;
XContentParser aggParser = createParser(JsonXContent.jsonXContent, agg);
QueryParseContext parseContext = new QueryParseContext(aggParser, ParseFieldMatcher.STRICT);
aggParser.nextToken();
SearchContext context = createSearchContext(index);
final AggregatorFactories factories = parser.parseAggregators(parseContext).build(context, null);
final AggregatorFactories factories = AggregatorFactories.parseAggregators(parseContext).build(context, null);
final Aggregator[] aggregators = factories.createTopLevelAggregators();
assertEquals(1, aggregators.length);
return aggregators[0].needsScores();

View File

@@ -18,14 +18,56 @@
*/
package org.elasticsearch.search.aggregations;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders;
import org.elasticsearch.test.AbstractQueryTestCase;
import org.elasticsearch.test.ESTestCase;
import java.util.List;
import java.util.Random;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import static java.util.Collections.emptyList;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
public class AggregatorFactoriesTests extends ESTestCase {
private String[] currentTypes;
private NamedXContentRegistry xContentRegistry;
protected ParseFieldMatcher parseFieldMatcher;
@Override
public void setUp() throws Exception {
super.setUp();
// we have to prefer CURRENT since with the range of versions we support
// it's rather unlikely to get the current actually.
Settings settings = Settings.builder().put("node.name", AbstractQueryTestCase.class.toString())
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
.put(ScriptService.SCRIPT_AUTO_RELOAD_ENABLED_SETTING.getKey(), false).build();
// create some random type with some default field, those types will
// stick around for all of the subclasses
currentTypes = new String[randomIntBetween(0, 5)];
for (int i = 0; i < currentTypes.length; i++) {
String type = randomAsciiOfLengthBetween(1, 10);
currentTypes[i] = type;
}
xContentRegistry = new NamedXContentRegistry(new SearchModule(settings, false, emptyList()).getNamedXContents());
parseFieldMatcher = ParseFieldMatcher.STRICT;
}
public void testGetAggregatorFactories_returnsUnmodifiableList() {
AggregatorFactories.Builder builder = new AggregatorFactories.Builder().addAggregator(AggregationBuilders.avg("foo"));
@@ -42,4 +84,175 @@ public class AggregatorFactoriesTests extends ESTestCase {
expectThrows(UnsupportedOperationException.class,
() -> pipelineAggregatorFactories.add(PipelineAggregatorBuilders.avgBucket("bar", "path2")));
}
public void testTwoTypes() throws Exception {
XContentBuilder source = JsonXContent.contentBuilder()
.startObject()
.startObject("in_stock")
.startObject("filter")
.startObject("range")
.startObject("stock")
.field("gt", 0)
.endObject()
.endObject()
.endObject()
.startObject("terms")
.field("field", "stock")
.endObject()
.endObject()
.endObject();
XContentParser parser = createParser(source);
QueryParseContext parseContext = new QueryParseContext(parser, parseFieldMatcher);
assertSame(XContentParser.Token.START_OBJECT, parser.nextToken());
Exception e = expectThrows(ParsingException.class, () -> AggregatorFactories.parseAggregators(parseContext));
assertThat(e.toString(), containsString("Found two aggregation type definitions in [in_stock]: [filter] and [terms]"));
}
public void testTwoAggs() throws Exception {
assumeFalse("Test only makes sense if XContent parser doesn't have strict duplicate checks enabled",
XContent.isStrictDuplicateDetectionEnabled());
XContentBuilder source = JsonXContent.contentBuilder()
.startObject()
.startObject("by_date")
.startObject("date_histogram")
.field("field", "timestamp")
.field("interval", "month")
.endObject()
.startObject("aggs")
.startObject("tag_count")
.startObject("cardinality")
.field("field", "tag")
.endObject()
.endObject()
.endObject()
.startObject("aggs") // 2nd "aggs": illegal
.startObject("tag_count2")
.startObject("cardinality")
.field("field", "tag")
.endObject()
.endObject()
.endObject()
.endObject()
.endObject();
XContentParser parser = createParser(source);
QueryParseContext parseContext = new QueryParseContext(parser, parseFieldMatcher);
assertSame(XContentParser.Token.START_OBJECT, parser.nextToken());
Exception e = expectThrows(ParsingException.class, () -> AggregatorFactories.parseAggregators(parseContext));
assertThat(e.toString(), containsString("Found two sub aggregation definitions under [by_date]"));
}
public void testInvalidAggregationName() throws Exception {
Matcher matcher = Pattern.compile("[^\\[\\]>]+").matcher("");
String name;
Random rand = random();
int len = randomIntBetween(1, 5);
char[] word = new char[len];
while (true) {
for (int i = 0; i < word.length; i++) {
word[i] = (char) rand.nextInt(127);
}
name = String.valueOf(word);
if (!matcher.reset(name).matches()) {
break;
}
}
XContentBuilder source = JsonXContent.contentBuilder()
.startObject()
.startObject(name)
.startObject("filter")
.startObject("range")
.startObject("stock")
.field("gt", 0)
.endObject()
.endObject()
.endObject()
.endObject()
.endObject();
XContentParser parser = createParser(source);
QueryParseContext parseContext = new QueryParseContext(parser, parseFieldMatcher);
assertSame(XContentParser.Token.START_OBJECT, parser.nextToken());
Exception e = expectThrows(ParsingException.class, () -> AggregatorFactories.parseAggregators(parseContext));
assertThat(e.toString(), containsString("Invalid aggregation name [" + name + "]"));
}
public void testSameAggregationName() throws Exception {
assumeFalse("Test only makes sense if XContent parser doesn't have strict duplicate checks enabled",
XContent.isStrictDuplicateDetectionEnabled());
final String name = randomAsciiOfLengthBetween(1, 10);
XContentBuilder source = JsonXContent.contentBuilder()
.startObject()
.startObject(name)
.startObject("terms")
.field("field", "a")
.endObject()
.endObject()
.startObject(name)
.startObject("terms")
.field("field", "b")
.endObject()
.endObject()
.endObject();
XContentParser parser = createParser(source);
QueryParseContext parseContext = new QueryParseContext(parser, parseFieldMatcher);
assertSame(XContentParser.Token.START_OBJECT, parser.nextToken());
Exception e = expectThrows(ParsingException.class, () -> AggregatorFactories.parseAggregators(parseContext));
assertThat(e.toString(), containsString("Two sibling aggregations cannot have the same name: [" + name + "]"));
}
public void testMissingName() throws Exception {
XContentBuilder source = JsonXContent.contentBuilder()
.startObject()
.startObject("by_date")
.startObject("date_histogram")
.field("field", "timestamp")
.field("interval", "month")
.endObject()
.startObject("aggs")
// the aggregation name is missing
//.startObject("tag_count")
.startObject("cardinality")
.field("field", "tag")
.endObject()
//.endObject()
.endObject()
.endObject()
.endObject();
XContentParser parser = createParser(source);
QueryParseContext parseContext = new QueryParseContext(parser, parseFieldMatcher);
assertSame(XContentParser.Token.START_OBJECT, parser.nextToken());
Exception e = expectThrows(ParsingException.class, () -> AggregatorFactories.parseAggregators(parseContext));
assertThat(e.toString(), containsString("Expected [START_OBJECT] under [field], but got a [VALUE_STRING] in [cardinality]"));
}
public void testMissingType() throws Exception {
XContentBuilder source = JsonXContent.contentBuilder()
.startObject()
.startObject("by_date")
.startObject("date_histogram")
.field("field", "timestamp")
.field("interval", "month")
.endObject()
.startObject("aggs")
.startObject("tag_count")
// the aggregation type is missing
//.startObject("cardinality")
.field("field", "tag")
//.endObject()
.endObject()
.endObject()
.endObject()
.endObject();
XContentParser parser = createParser(source);
QueryParseContext parseContext = new QueryParseContext(parser, parseFieldMatcher);
assertSame(XContentParser.Token.START_OBJECT, parser.nextToken());
Exception e = expectThrows(ParsingException.class, () -> AggregatorFactories.parseAggregators(parseContext));
assertThat(e.toString(), containsString("Expected [START_OBJECT] under [field], but got a [VALUE_STRING] in [tag_count]"));
}
@Override
protected NamedXContentRegistry xContentRegistry() {
return xContentRegistry;
}
}

View File

@@ -1,273 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.test.AbstractQueryTestCase;
import org.elasticsearch.test.ESTestCase;
import java.util.Random;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import static java.util.Collections.emptyList;
import static org.hamcrest.Matchers.containsString;
public class AggregatorParsingTests extends ESTestCase {
private String[] currentTypes;
protected String[] getCurrentTypes() {
return currentTypes;
}
protected AggregatorParsers aggParsers;
private NamedXContentRegistry xContentRegistry;
protected ParseFieldMatcher parseFieldMatcher;
/**
* Setup for the whole base test class.
*/
@Override
public void setUp() throws Exception {
super.setUp();
// we have to prefer CURRENT since with the range of versions we support
// it's rather unlikely to get the current actually.
Settings settings = Settings.builder().put("node.name", AbstractQueryTestCase.class.toString())
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
.put(ScriptService.SCRIPT_AUTO_RELOAD_ENABLED_SETTING.getKey(), false).build();
SearchModule searchModule = new SearchModule(settings, false, emptyList());
aggParsers = searchModule.getSearchRequestParsers().aggParsers;
// create some random type with some default field, those types will
// stick around for all of the subclasses
currentTypes = new String[randomIntBetween(0, 5)];
for (int i = 0; i < currentTypes.length; i++) {
String type = randomAsciiOfLengthBetween(1, 10);
currentTypes[i] = type;
}
xContentRegistry = new NamedXContentRegistry(searchModule.getNamedXContents());
parseFieldMatcher = ParseFieldMatcher.STRICT;
}
public void testTwoTypes() throws Exception {
XContentBuilder source = JsonXContent.contentBuilder()
.startObject()
.startObject("in_stock")
.startObject("filter")
.startObject("range")
.startObject("stock")
.field("gt", 0)
.endObject()
.endObject()
.endObject()
.startObject("terms")
.field("field", "stock")
.endObject()
.endObject()
.endObject();
try {
XContentParser parser = createParser(source);
QueryParseContext parseContext = new QueryParseContext(parser, parseFieldMatcher);
assertSame(XContentParser.Token.START_OBJECT, parser.nextToken());
aggParsers.parseAggregators(parseContext);
fail();
} catch (ParsingException e) {
assertThat(e.toString(), containsString("Found two aggregation type definitions in [in_stock]: [filter] and [terms]"));
}
}
public void testTwoAggs() throws Exception {
assumeFalse("Test only makes sense if XContent parser doesn't have strict duplicate checks enabled",
XContent.isStrictDuplicateDetectionEnabled());
XContentBuilder source = JsonXContent.contentBuilder()
.startObject()
.startObject("by_date")
.startObject("date_histogram")
.field("field", "timestamp")
.field("interval", "month")
.endObject()
.startObject("aggs")
.startObject("tag_count")
.startObject("cardinality")
.field("field", "tag")
.endObject()
.endObject()
.endObject()
.startObject("aggs") // 2nd "aggs": illegal
.startObject("tag_count2")
.startObject("cardinality")
.field("field", "tag")
.endObject()
.endObject()
.endObject()
.endObject()
.endObject();
try {
XContentParser parser = createParser(source);
QueryParseContext parseContext = new QueryParseContext(parser, parseFieldMatcher);
assertSame(XContentParser.Token.START_OBJECT, parser.nextToken());
aggParsers.parseAggregators(parseContext);
fail();
} catch (ParsingException e) {
assertThat(e.toString(), containsString("Found two sub aggregation definitions under [by_date]"));
}
}
public void testInvalidAggregationName() throws Exception {
Matcher matcher = Pattern.compile("[^\\[\\]>]+").matcher("");
String name;
Random rand = random();
int len = randomIntBetween(1, 5);
char[] word = new char[len];
while (true) {
for (int i = 0; i < word.length; i++) {
word[i] = (char) rand.nextInt(127);
}
name = String.valueOf(word);
if (!matcher.reset(name).matches()) {
break;
}
}
XContentBuilder source = JsonXContent.contentBuilder()
.startObject()
.startObject(name)
.startObject("filter")
.startObject("range")
.startObject("stock")
.field("gt", 0)
.endObject()
.endObject()
.endObject()
.endObject()
.endObject();
try {
XContentParser parser = createParser(source);
QueryParseContext parseContext = new QueryParseContext(parser, parseFieldMatcher);
assertSame(XContentParser.Token.START_OBJECT, parser.nextToken());
aggParsers.parseAggregators(parseContext);
fail();
} catch (ParsingException e) {
assertThat(e.toString(), containsString("Invalid aggregation name [" + name + "]"));
}
}
public void testSameAggregationName() throws Exception {
assumeFalse("Test only makes sense if XContent parser doesn't have strict duplicate checks enabled",
XContent.isStrictDuplicateDetectionEnabled());
final String name = randomAsciiOfLengthBetween(1, 10);
XContentBuilder source = JsonXContent.contentBuilder()
.startObject()
.startObject(name)
.startObject("terms")
.field("field", "a")
.endObject()
.endObject()
.startObject(name)
.startObject("terms")
.field("field", "b")
.endObject()
.endObject()
.endObject();
try {
XContentParser parser = createParser(source);
QueryParseContext parseContext = new QueryParseContext(parser, parseFieldMatcher);
assertSame(XContentParser.Token.START_OBJECT, parser.nextToken());
aggParsers.parseAggregators(parseContext);
fail();
} catch (IllegalArgumentException e) {
assertThat(e.toString(), containsString("Two sibling aggregations cannot have the same name: [" + name + "]"));
}
}
public void testMissingName() throws Exception {
XContentBuilder source = JsonXContent.contentBuilder()
.startObject()
.startObject("by_date")
.startObject("date_histogram")
.field("field", "timestamp")
.field("interval", "month")
.endObject()
.startObject("aggs")
// the aggregation name is missing
//.startObject("tag_count")
.startObject("cardinality")
.field("field", "tag")
.endObject()
//.endObject()
.endObject()
.endObject()
.endObject();
try {
XContentParser parser = createParser(source);
QueryParseContext parseContext = new QueryParseContext(parser, parseFieldMatcher);
assertSame(XContentParser.Token.START_OBJECT, parser.nextToken());
aggParsers.parseAggregators(parseContext);
fail();
} catch (ParsingException e) {
// All Good
}
}
public void testMissingType() throws Exception {
XContentBuilder source = JsonXContent.contentBuilder()
.startObject()
.startObject("by_date")
.startObject("date_histogram")
.field("field", "timestamp")
.field("interval", "month")
.endObject()
.startObject("aggs")
.startObject("tag_count")
// the aggregation type is missing
//.startObject("cardinality")
.field("field", "tag")
//.endObject()
.endObject()
.endObject()
.endObject()
.endObject();
try {
XContentParser parser = createParser(source);
QueryParseContext parseContext = new QueryParseContext(parser, parseFieldMatcher);
assertSame(XContentParser.Token.START_OBJECT, parser.nextToken());
aggParsers.parseAggregators(parseContext);
fail();
} catch (ParsingException e) {
// All Good
}
}
@Override
protected NamedXContentRegistry xContentRegistry() {
return xContentRegistry;
}
}

View File

@@ -45,6 +45,7 @@ import java.util.List;
import static java.util.Collections.emptyList;
import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode;
import static org.hamcrest.Matchers.hasSize;
public abstract class BaseAggregationTestCase<AB extends AbstractAggregationBuilder<AB>> extends ESTestCase {
@@ -62,8 +63,6 @@ public abstract class BaseAggregationTestCase<AB extends AbstractAggregationBuil
}
private NamedWriteableRegistry namedWriteableRegistry;
protected AggregatorParsers aggParsers;
private NamedXContentRegistry xContentRegistry;
protected ParseFieldMatcher parseFieldMatcher;
@@ -86,7 +85,6 @@ public abstract class BaseAggregationTestCase<AB extends AbstractAggregationBuil
entries.addAll(searchModule.getNamedWriteables());
namedWriteableRegistry = new NamedWriteableRegistry(entries);
xContentRegistry = new NamedXContentRegistry(searchModule.getNamedXContents());
aggParsers = searchModule.getSearchRequestParsers().aggParsers;
//create some random type with some default field, those types will stick around for all of the subclasses
currentTypes = new String[randomIntBetween(0, 5)];
for (int i = 0; i < currentTypes.length; i++) {
@@ -116,29 +114,27 @@ public abstract class BaseAggregationTestCase<AB extends AbstractAggregationBuil
factoriesBuilder.toXContent(builder, ToXContent.EMPTY_PARAMS);
XContentBuilder shuffled = shuffleXContent(builder);
XContentParser parser = createParser(shuffled);
QueryParseContext parseContext = new QueryParseContext(parser, parseFieldMatcher);
assertSame(XContentParser.Token.START_OBJECT, parser.nextToken());
assertSame(XContentParser.Token.FIELD_NAME, parser.nextToken());
assertEquals(testAgg.name, parser.currentName());
assertSame(XContentParser.Token.START_OBJECT, parser.nextToken());
assertSame(XContentParser.Token.FIELD_NAME, parser.nextToken());
assertEquals(testAgg.type.name(), parser.currentName());
assertSame(XContentParser.Token.START_OBJECT, parser.nextToken());
AggregationBuilder newAgg = aggParsers.parser(testAgg.getType()).parse(testAgg.name, parseContext);
assertSame(XContentParser.Token.END_OBJECT, parser.currentToken());
assertSame(XContentParser.Token.END_OBJECT, parser.nextToken());
assertSame(XContentParser.Token.END_OBJECT, parser.nextToken());
assertNull(parser.nextToken());
assertNotNull(newAgg);
AggregationBuilder newAgg = parse(parser);
assertNotSame(newAgg, testAgg);
assertEquals(testAgg, newAgg);
assertEquals(testAgg.hashCode(), newAgg.hashCode());
}
protected AggregationBuilder parse(XContentParser parser) throws IOException {
QueryParseContext parseContext = new QueryParseContext(parser, parseFieldMatcher);
assertSame(XContentParser.Token.START_OBJECT, parser.nextToken());
AggregatorFactories.Builder parsed = AggregatorFactories.parseAggregators(parseContext);
assertThat(parsed.getAggregatorFactories(), hasSize(1));
assertThat(parsed.getPipelineAggregatorFactories(), hasSize(0));
AggregationBuilder newAgg = parsed.getAggregatorFactories().get(0);
assertNull(parser.nextToken());
assertNotNull(newAgg);
return newAgg;
}
/**
* Test serialization and deserialization of the test AggregatorFactory.
*/
public void testSerialization() throws IOException {
AB testAgg = createTestAggregatorBuilder();
try (BytesStreamOutput output = new BytesStreamOutput()) {

View File

@@ -26,6 +26,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
@@ -46,6 +47,7 @@ import java.util.List;
import static java.util.Collections.emptyList;
import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode;
import static org.hamcrest.Matchers.hasSize;
public abstract class BasePipelineAggregationTestCase<AF extends AbstractPipelineAggregationBuilder<AF>> extends ESTestCase {
@@ -62,8 +64,8 @@ public abstract class BasePipelineAggregationTestCase<AF extends AbstractPipelin
}
private NamedWriteableRegistry namedWriteableRegistry;
private NamedXContentRegistry xContentRegistry;
protected AggregatorParsers aggParsers;
protected ParseFieldMatcher parseFieldMatcher;
protected abstract AF createTestAggregatorFactory();
@@ -84,7 +86,7 @@ public abstract class BasePipelineAggregationTestCase<AF extends AbstractPipelin
entries.addAll(indicesModule.getNamedWriteables());
entries.addAll(searchModule.getNamedWriteables());
namedWriteableRegistry = new NamedWriteableRegistry(entries);
aggParsers = searchModule.getSearchRequestParsers().aggParsers;
xContentRegistry = new NamedXContentRegistry(searchModule.getNamedXContents());
//create some random type with some default field, those types will stick around for all of the subclasses
currentTypes = new String[randomIntBetween(0, 5)];
for (int i = 0; i < currentTypes.length; i++) {
@@ -110,32 +112,29 @@ public abstract class BasePipelineAggregationTestCase<AF extends AbstractPipelin
factoriesBuilder.toXContent(builder, ToXContent.EMPTY_PARAMS);
XContentBuilder shuffled = shuffleXContent(builder);
XContentParser parser = createParser(shuffled);
QueryParseContext parseContext = new QueryParseContext(parser, parseFieldMatcher);
String contentString = factoriesBuilder.toString();
logger.info("Content string: {}", contentString);
assertSame(XContentParser.Token.START_OBJECT, parser.nextToken());
assertSame(XContentParser.Token.FIELD_NAME, parser.nextToken());
assertEquals(testAgg.getName(), parser.currentName());
assertSame(XContentParser.Token.START_OBJECT, parser.nextToken());
assertSame(XContentParser.Token.FIELD_NAME, parser.nextToken());
assertEquals(testAgg.type(), parser.currentName());
assertSame(XContentParser.Token.START_OBJECT, parser.nextToken());
PipelineAggregationBuilder newAgg = aggParsers.pipelineParser(testAgg.getWriteableName())
.parse(testAgg.getName(), parseContext);
assertSame(XContentParser.Token.END_OBJECT, parser.currentToken());
assertSame(XContentParser.Token.END_OBJECT, parser.nextToken());
assertSame(XContentParser.Token.END_OBJECT, parser.nextToken());
assertNull(parser.nextToken());
assertNotNull(newAgg);
PipelineAggregationBuilder newAgg = parse(parser);
assertNotSame(newAgg, testAgg);
assertEquals(testAgg, newAgg);
assertEquals(testAgg.hashCode(), newAgg.hashCode());
}
protected PipelineAggregationBuilder parse(XContentParser parser) throws IOException {
QueryParseContext parseContext = new QueryParseContext(parser, parseFieldMatcher);
assertSame(XContentParser.Token.START_OBJECT, parser.nextToken());
AggregatorFactories.Builder parsed = AggregatorFactories.parseAggregators(parseContext);
assertThat(parsed.getAggregatorFactories(), hasSize(0));
assertThat(parsed.getPipelineAggregatorFactories(), hasSize(1));
PipelineAggregationBuilder newAgg = parsed.getPipelineAggregatorFactories().get(0);
assertNull(parser.nextToken());
assertNotNull(newAgg);
return newAgg;
}
/**
* Test serialization and deserialization of the test AggregatorFactory.
*/
public void testSerialization() throws IOException {
AF testAgg = createTestAggregatorFactory();
try (BytesStreamOutput output = new BytesStreamOutput()) {
@@ -199,4 +198,9 @@ public abstract class BasePipelineAggregationTestCase<AF extends AbstractPipelin
return INT_FIELD_NAME;
}
}
@Override
protected NamedXContentRegistry xContentRegistry() {
return xContentRegistry;
}
}

View File

@@ -24,6 +24,7 @@ import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.script.Script;
import org.elasticsearch.search.aggregations.AggregationInitializationException;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.BaseAggregationTestCase;
import org.elasticsearch.search.aggregations.metrics.tophits.TopHitsAggregationBuilder;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
@@ -185,15 +186,11 @@ public class TopHitsTests extends BaseAggregationTestCase<TopHitsAggregationBuil
" }\n" +
" }\n" +
"}";
try {
XContentParser parser = createParser(JsonXContent.jsonXContent, source);
QueryParseContext parseContext = new QueryParseContext(parser, parseFieldMatcher);
assertSame(XContentParser.Token.START_OBJECT, parser.nextToken());
aggParsers.parseAggregators(parseContext);
fail();
} catch (AggregationInitializationException e) {
assertThat(e.toString(), containsString("Aggregator [top_tags_hits] of type [top_hits] cannot accept sub-aggregations"));
}
XContentParser parser = createParser(JsonXContent.jsonXContent, source);
QueryParseContext parseContext = new QueryParseContext(parser, parseFieldMatcher);
assertSame(XContentParser.Token.START_OBJECT, parser.nextToken());
Exception e = expectThrows(AggregationInitializationException.class, () -> AggregatorFactories.parseAggregators(parseContext));
assertThat(e.toString(), containsString("Aggregator [top_tags_hits] of type [top_hits] cannot accept sub-aggregations"));
}
}
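The try/fail/catch block above collapses into the expectThrows idiom; reduced to its essentials (a sketch using the names from the hunk above):

    // expectThrows returns the caught exception, so the message can be asserted
    // directly, without the fail() sentinel the old pattern needed.
    Exception e = expectThrows(AggregationInitializationException.class,
            () -> AggregatorFactories.parseAggregators(parseContext));
    assertThat(e.toString(), containsString("cannot accept sub-aggregations"));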

@@ -19,10 +19,8 @@
package org.elasticsearch.search.aggregations.pipeline.bucketmetrics;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.extended.ExtendedStatsBucketPipelineAggregationBuilder;
import static org.hamcrest.Matchers.equalTo;
@@ -39,20 +37,18 @@ public class ExtendedStatsBucketTests extends AbstractBucketMetricsTestCase<Exte
}
public void testSigmaFromInt() throws Exception {
String content = XContentFactory.jsonBuilder()
XContentBuilder content = XContentFactory.jsonBuilder()
.startObject()
.field("sigma", 5)
.field("buckets_path", "test")
.endObject()
.string();
.startObject("name")
.startObject("extended_stats_bucket")
.field("sigma", 5)
.field("buckets_path", "test")
.endObject()
.endObject()
.endObject();
XContentParser parser = createParser(JsonXContent.jsonXContent, content);
QueryParseContext parseContext = new QueryParseContext(parser, parseFieldMatcher);
parser.nextToken(); // skip object start
ExtendedStatsBucketPipelineAggregationBuilder builder = (ExtendedStatsBucketPipelineAggregationBuilder) aggParsers
.pipelineParser(ExtendedStatsBucketPipelineAggregationBuilder.NAME)
.parse("test", parseContext);
ExtendedStatsBucketPipelineAggregationBuilder builder = (ExtendedStatsBucketPipelineAggregationBuilder) parse(
createParser(content));
assertThat(builder.sigma(), equalTo(5.0));
}
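The added startObject("name")/startObject("extended_stats_bucket") wrapping is the top-level shape AggregatorFactories.parseAggregators expects; the builder above should produce JSON equivalent to this sketch:

    { "name": { "extended_stats_bucket": { "sigma": 5, "buckets_path": "test" } } }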

@@ -19,10 +19,8 @@
package org.elasticsearch.search.aggregations.pipeline.bucketmetrics;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile.PercentilesBucketPipelineAggregationBuilder;
import static org.hamcrest.Matchers.equalTo;
@@ -44,20 +42,17 @@ public class PercentilesBucketTests extends AbstractBucketMetricsTestCase<Percen
}
public void testPercentsFromMixedArray() throws Exception {
String content = XContentFactory.jsonBuilder()
XContentBuilder content = XContentFactory.jsonBuilder()
.startObject()
.field("buckets_path", "test")
.array("percents", 0, 20.0, 50, 75.99)
.endObject()
.string();
.startObject("name")
.startObject("percentiles_bucket")
.field("buckets_path", "test")
.array("percents", 0, 20.0, 50, 75.99)
.endObject()
.endObject()
.endObject();
XContentParser parser = createParser(JsonXContent.jsonXContent, content);
QueryParseContext parseContext = new QueryParseContext(parser, parseFieldMatcher);
parser.nextToken(); // skip object start
PercentilesBucketPipelineAggregationBuilder builder = (PercentilesBucketPipelineAggregationBuilder) aggParsers
.pipelineParser(PercentilesBucketPipelineAggregationBuilder.NAME)
.parse("test", parseContext);
PercentilesBucketPipelineAggregationBuilder builder = (PercentilesBucketPipelineAggregationBuilder) parse(createParser(content));
assertThat(builder.percents(), equalTo(new double[]{0.0, 20.0, 50.0, 75.99}));
}
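Same rewrapping here; the builder above should correspond to:

    { "name": { "percentiles_bucket": { "buckets_path": "test", "percents": [0, 20.0, 50, 75.99] } } }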

@@ -19,9 +19,7 @@
package org.elasticsearch.search.aggregations.pipeline.moving.avg;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.search.aggregations.BasePipelineAggregationTestCase;
import org.elasticsearch.search.aggregations.PipelineAggregationBuilder;
import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
@@ -33,8 +31,6 @@ import org.elasticsearch.search.aggregations.pipeline.movavg.models.HoltWintersM
import org.elasticsearch.search.aggregations.pipeline.movavg.models.LinearModel;
import org.elasticsearch.search.aggregations.pipeline.movavg.models.SimpleModel;
;
public class MovAvgTests extends BasePipelineAggregationTestCase<MovAvgPipelineAggregationBuilder> {
@Override
@@ -103,25 +99,11 @@ public class MovAvgTests extends BasePipelineAggregationTestCase<MovAvgPipelineA
String json = "{" +
" \"commits_moving_avg\": {" +
" \"moving_avg\": {" +
" \"buckets_path\": \"commits\"" +
" \"buckets_path\": \"commits\"" +
" }" +
" }" +
"}";
XContentParser parser = createParser(JsonXContent.jsonXContent, json);
QueryParseContext parseContext = new QueryParseContext(parser, parseFieldMatcher);
assertSame(XContentParser.Token.START_OBJECT, parser.nextToken());
assertSame(XContentParser.Token.FIELD_NAME, parser.nextToken());
assertEquals(expected.getName(), parser.currentName());
assertSame(XContentParser.Token.START_OBJECT, parser.nextToken());
assertSame(XContentParser.Token.FIELD_NAME, parser.nextToken());
assertEquals(expected.type(), parser.currentName());
assertSame(XContentParser.Token.START_OBJECT, parser.nextToken());
PipelineAggregationBuilder newAgg = aggParsers.pipelineParser(expected.getWriteableName()).parse(expected.getName(), parseContext);
assertSame(XContentParser.Token.END_OBJECT, parser.currentToken());
assertSame(XContentParser.Token.END_OBJECT, parser.nextToken());
assertSame(XContentParser.Token.END_OBJECT, parser.nextToken());
assertNull(parser.nextToken());
assertNotNull(newAgg);
PipelineAggregationBuilder newAgg = parse(createParser(JsonXContent.jsonXContent, json));
assertNotSame(newAgg, expected);
assertEquals(expected, newAgg);
assertEquals(expected.hashCode(), newAgg.hashCode());

@@ -75,8 +75,7 @@ public class SearchSourceBuilderTests extends AbstractSearchTestCase {
parser.nextToken(); // sometimes we move it on the START_OBJECT to
// test the embedded case
}
SearchSourceBuilder newBuilder = SearchSourceBuilder.fromXContent(parseContext, searchRequestParsers.aggParsers,
searchRequestParsers.suggesters);
SearchSourceBuilder newBuilder = SearchSourceBuilder.fromXContent(parseContext, searchRequestParsers.suggesters);
assertNull(parser.nextToken());
assertEquals(testBuilder, newBuilder);
assertEquals(testBuilder.hashCode(), newBuilder.hashCode());
@@ -114,7 +113,7 @@ public class SearchSourceBuilderTests extends AbstractSearchTestCase {
String restContent = " { \"_source\": { \"includes\": \"include\", \"excludes\": \"*.field2\"}}";
try (XContentParser parser = createParser(JsonXContent.jsonXContent, restContent)) {
SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.fromXContent(createParseContext(parser),
searchRequestParsers.aggParsers, searchRequestParsers.suggesters);
searchRequestParsers.suggesters);
assertArrayEquals(new String[]{"*.field2"}, searchSourceBuilder.fetchSource().excludes());
assertArrayEquals(new String[]{"include"}, searchSourceBuilder.fetchSource().includes());
}
@@ -123,7 +122,7 @@ public class SearchSourceBuilderTests extends AbstractSearchTestCase {
String restContent = " { \"_source\": false}";
try (XContentParser parser = createParser(JsonXContent.jsonXContent, restContent)) {
SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.fromXContent(createParseContext(parser),
searchRequestParsers.aggParsers, searchRequestParsers.suggesters);
searchRequestParsers.suggesters);
assertArrayEquals(new String[]{}, searchSourceBuilder.fetchSource().excludes());
assertArrayEquals(new String[]{}, searchSourceBuilder.fetchSource().includes());
assertFalse(searchSourceBuilder.fetchSource().fetchSource());
@@ -146,7 +145,7 @@ public class SearchSourceBuilderTests extends AbstractSearchTestCase {
" } }";
try (XContentParser parser = createParser(JsonXContent.jsonXContent, restContent)) {
ParsingException e = expectThrows(ParsingException.class, () -> SearchSourceBuilder.fromXContent(createParseContext(parser),
searchRequestParsers.aggParsers, searchRequestParsers.suggesters));
searchRequestParsers.suggesters));
assertEquals("[multi_match] malformed query, expected [END_OBJECT] but found [FIELD_NAME]", e.getMessage());
}
}
@@ -156,7 +155,7 @@ public class SearchSourceBuilderTests extends AbstractSearchTestCase {
String restContent = " { \"sort\": \"foo\"}";
try (XContentParser parser = createParser(JsonXContent.jsonXContent, restContent)) {
SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.fromXContent(createParseContext(parser),
searchRequestParsers.aggParsers, searchRequestParsers.suggesters);
searchRequestParsers.suggesters);
assertEquals(1, searchSourceBuilder.sorts().size());
assertEquals(new FieldSortBuilder("foo"), searchSourceBuilder.sorts().get(0));
}
@@ -172,7 +171,7 @@ public class SearchSourceBuilderTests extends AbstractSearchTestCase {
" ]}";
try (XContentParser parser = createParser(JsonXContent.jsonXContent, restContent)) {
SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.fromXContent(createParseContext(parser),
searchRequestParsers.aggParsers, searchRequestParsers.suggesters);
searchRequestParsers.suggesters);
assertEquals(5, searchSourceBuilder.sorts().size());
assertEquals(new FieldSortBuilder("post_date"), searchSourceBuilder.sorts().get(0));
assertEquals(new FieldSortBuilder("user"), searchSourceBuilder.sorts().get(1));
@@ -196,7 +195,7 @@ public class SearchSourceBuilderTests extends AbstractSearchTestCase {
"}\n";
try (XContentParser parser = createParser(JsonXContent.jsonXContent, restContent)) {
SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.fromXContent(createParseContext(parser),
searchRequestParsers.aggParsers, searchRequestParsers.suggesters);
searchRequestParsers.suggesters);
assertEquals(1, searchSourceBuilder.aggregations().count());
}
}
@@ -212,7 +211,7 @@ public class SearchSourceBuilderTests extends AbstractSearchTestCase {
"}\n";
try (XContentParser parser = createParser(JsonXContent.jsonXContent, restContent)) {
SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.fromXContent(createParseContext(parser),
searchRequestParsers.aggParsers, searchRequestParsers.suggesters);
searchRequestParsers.suggesters);
assertEquals(1, searchSourceBuilder.aggregations().count());
}
}
@@ -238,7 +237,7 @@ public class SearchSourceBuilderTests extends AbstractSearchTestCase {
"}\n";
try (XContentParser parser = createParser(JsonXContent.jsonXContent, restContent)) {
SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.fromXContent(createParseContext(parser),
searchRequestParsers.aggParsers, searchRequestParsers.suggesters);
searchRequestParsers.suggesters);
assertEquals(1, searchSourceBuilder.rescores().size());
assertEquals(new QueryRescorerBuilder(QueryBuilders.matchQuery("content", "baz")).windowSize(50),
searchSourceBuilder.rescores().get(0));
@@ -261,7 +260,7 @@ public class SearchSourceBuilderTests extends AbstractSearchTestCase {
"}\n";
try (XContentParser parser = createParser(JsonXContent.jsonXContent, restContent)) {
SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.fromXContent(createParseContext(parser),
searchRequestParsers.aggParsers, searchRequestParsers.suggesters);
searchRequestParsers.suggesters);
assertEquals(1, searchSourceBuilder.rescores().size());
assertEquals(new QueryRescorerBuilder(QueryBuilders.matchQuery("content", "baz")).windowSize(50),
searchSourceBuilder.rescores().get(0));
@@ -274,7 +273,7 @@ public class SearchSourceBuilderTests extends AbstractSearchTestCase {
final String query = "{ \"query\": { \"match_all\": {}}, \"timeout\": \"" + timeout + "\"}";
try (XContentParser parser = createParser(JsonXContent.jsonXContent, query)) {
final SearchSourceBuilder builder = SearchSourceBuilder.fromXContent(createParseContext(parser),
searchRequestParsers.aggParsers, searchRequestParsers.suggesters);
searchRequestParsers.suggesters);
assertThat(builder.timeout(), equalTo(TimeValue.parseTimeValue(timeout, null, "timeout")));
}
}
@@ -283,11 +282,8 @@ public class SearchSourceBuilderTests extends AbstractSearchTestCase {
final int timeout = randomIntBetween(1, 1024);
final String query = "{ \"query\": { \"match_all\": {}}, \"timeout\": \"" + timeout + "\"}";
try (XContentParser parser = createParser(JsonXContent.jsonXContent, query)) {
final ElasticsearchParseException e =
expectThrows(
ElasticsearchParseException.class,
() -> SearchSourceBuilder.fromXContent(createParseContext(parser),
searchRequestParsers.aggParsers, searchRequestParsers.suggesters));
final ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> SearchSourceBuilder.fromXContent(
createParseContext(parser), searchRequestParsers.suggesters));
assertThat(e, hasToString(containsString("unit is missing or unrecognized")));
}
}
@@ -320,7 +316,7 @@ public class SearchSourceBuilderTests extends AbstractSearchTestCase {
String restContent = " { \"indices_boost\": {\"foo\": 1.0, \"bar\": 2.0}}";
try (XContentParser parser = createParser(JsonXContent.jsonXContent, restContent)) {
SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.fromXContent(createParseContext(parser),
searchRequestParsers.aggParsers, searchRequestParsers.suggesters);
searchRequestParsers.suggesters);
assertEquals(2, searchSourceBuilder.indexBoosts().size());
assertEquals(new SearchSourceBuilder.IndexBoost("foo", 1.0f), searchSourceBuilder.indexBoosts().get(0));
assertEquals(new SearchSourceBuilder.IndexBoost("bar", 2.0f), searchSourceBuilder.indexBoosts().get(1));
@@ -337,7 +333,7 @@ public class SearchSourceBuilderTests extends AbstractSearchTestCase {
" ]}";
try (XContentParser parser = createParser(JsonXContent.jsonXContent, restContent)) {
SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.fromXContent(createParseContext(parser),
searchRequestParsers.aggParsers, searchRequestParsers.suggesters);
searchRequestParsers.suggesters);
assertEquals(3, searchSourceBuilder.indexBoosts().size());
assertEquals(new SearchSourceBuilder.IndexBoost("foo", 1.0f), searchSourceBuilder.indexBoosts().get(0));
assertEquals(new SearchSourceBuilder.IndexBoost("bar", 2.0f), searchSourceBuilder.indexBoosts().get(1));
@@ -385,7 +381,7 @@ public class SearchSourceBuilderTests extends AbstractSearchTestCase {
private void assertIndicesBoostParseErrorMessage(String restContent, String expectedErrorMessage) throws IOException {
try (XContentParser parser = createParser(JsonXContent.jsonXContent, restContent)) {
ParsingException e = expectThrows(ParsingException.class, () -> SearchSourceBuilder.fromXContent(createParseContext(parser),
searchRequestParsers.aggParsers, searchRequestParsers.suggesters));
searchRequestParsers.suggesters));
assertEquals(expectedErrorMessage, e.getMessage());
}
}
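Every call in this test now uses the narrowed two-argument fromXContent. Its shape in isolation, as a sketch (createParser and createParseContext are the existing test-base helpers; the registry remark is an inference from this commit, not quoted documentation):

    try (XContentParser parser = createParser(JsonXContent.jsonXContent, restContent)) {
        SearchSourceBuilder builder = SearchSourceBuilder.fromXContent(
                createParseContext(parser), searchRequestParsers.suggesters);
        // aggregations in restContent are presumably resolved through the parser's
        // NamedXContentRegistry, which is why the AggregatorParsers argument could go away
    }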

@@ -88,8 +88,7 @@ public class TransportSearchTemplateAction extends HandledTransportAction<Search
try (XContentParser parser = XContentFactory.xContent(source).createParser(xContentRegistry, source)) {
SearchSourceBuilder builder = SearchSourceBuilder.searchSource();
builder.parseXContent(new QueryParseContext(parser, parseFieldMatcher),
searchRequestParsers.aggParsers, searchRequestParsers.suggesters);
builder.parseXContent(new QueryParseContext(parser, parseFieldMatcher), searchRequestParsers.suggesters);
builder.explain(request.isExplain());
builder.profile(request.isProfile());
searchRequest.source(builder);

@@ -156,9 +156,8 @@ public class TransportMultiPercolateAction extends HandledTransportAction<MultiP
PercolateRequest percolateRequest = multiPercolateRequest.requests().get(i);
BytesReference docSource = getResponseSources.get(i);
try {
SearchRequest searchRequest = TransportPercolateAction.createSearchRequest(
percolateRequest, docSource,
searchRequestParsers.aggParsers, xContentRegistry, parseFieldMatcher);
SearchRequest searchRequest = TransportPercolateAction.createSearchRequest(percolateRequest, docSource, xContentRegistry,
parseFieldMatcher);
multiSearchRequest.add(searchRequest);
} catch (Exception e) {
preFailures.put(i, new MultiPercolateResponse.Item(e));

@@ -48,7 +48,6 @@ import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.SearchRequestParsers;
import org.elasticsearch.search.aggregations.AggregatorParsers;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.threadpool.ThreadPool;
@@ -103,8 +102,7 @@ public class TransportPercolateAction extends HandledTransportAction<PercolateRe
private void innerDoExecute(PercolateRequest request, BytesReference docSource, ActionListener<PercolateResponse> listener) {
SearchRequest searchRequest;
try {
searchRequest = createSearchRequest(request, docSource,
searchRequestParsers.aggParsers, xContentRegistry, parseFieldMatcher);
searchRequest = createSearchRequest(request, docSource, xContentRegistry, parseFieldMatcher);
} catch (IOException e) {
listener.onFailure(e);
return;
@@ -127,7 +125,6 @@ public class TransportPercolateAction extends HandledTransportAction<PercolateRe
}
public static SearchRequest createSearchRequest(PercolateRequest percolateRequest, BytesReference documentSource,
AggregatorParsers aggParsers,
NamedXContentRegistry xContentRegistry,
ParseFieldMatcher parseFieldMatcher)
throws IOException {
@@ -229,7 +226,7 @@ public class TransportPercolateAction extends HandledTransportAction<PercolateRe
SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
try (XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(xContentRegistry, source)) {
QueryParseContext context = new QueryParseContext(parser, parseFieldMatcher);
searchSourceBuilder.parseXContent(context, aggParsers, null);
searchSourceBuilder.parseXContent(context, null);
searchRequest.source(searchSourceBuilder);
return searchRequest;
}
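With AggregatorParsers gone from the signature, both percolate call sites reduce to the same four-argument form (a sketch of the shared shape, using the names from the hunks above):

    SearchRequest searchRequest = TransportPercolateAction.createSearchRequest(
            percolateRequest, docSource, xContentRegistry, parseFieldMatcher);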

@@ -82,7 +82,7 @@ public class RestReindexAction extends AbstractBaseReindexRestHandler<ReindexReq
builder.map(source);
try (XContentParser innerParser = parser.contentType().xContent().createParser(parser.getXContentRegistry(), builder.bytes())) {
request.getSearchRequest().source().parseXContent(context.queryParseContext(innerParser),
context.searchRequestParsers.aggParsers, context.searchRequestParsers.suggesters);
context.searchRequestParsers.suggesters);
}
};

@@ -124,7 +124,7 @@ public class RestReindexActionTests extends ESTestCase {
}
try (XContentParser p = createParser(JsonXContent.jsonXContent, request)) {
ReindexRequest r = new ReindexRequest(new SearchRequest(), new IndexRequest());
SearchRequestParsers searchParsers = new SearchRequestParsers(null, null);
SearchRequestParsers searchParsers = new SearchRequestParsers(null);
RestReindexAction.PARSER.parse(p, r, new ReindexParseContext(searchParsers, ParseFieldMatcher.STRICT));
assertEquals("localhost", r.getRemoteInfo().getHost());
assertArrayEquals(new String[] {"source"}, r.getSearchRequest().indices());
@@ -132,7 +132,7 @@ public class RestReindexActionTests extends ESTestCase {
}
public void testPipelineQueryParameterIsError() throws IOException {
SearchRequestParsers parsers = new SearchRequestParsers(null, null);
SearchRequestParsers parsers = new SearchRequestParsers(null);
RestReindexAction action = new RestReindexAction(Settings.EMPTY, mock(RestController.class), parsers, null);
FakeRestRequest.Builder request = new FakeRestRequest.Builder(xContentRegistry());
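SearchRequestParsers now carries only the suggesters, so these fixtures shrink to a single constructor argument (a sketch; passing null appears tolerable here because the reindex bodies under test contain no suggest section — an inference from the tests, not a documented contract):

    // One-argument constructor after AggregatorParsers was removed from SearchRequestParsers.
    SearchRequestParsers parsers = new SearchRequestParsers(null);
    RestReindexAction action = new RestReindexAction(Settings.EMPTY, mock(RestController.class), parsers, null);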