diff --git a/core/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java b/core/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java index bb20728ee20..046ee382ba1 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java @@ -90,8 +90,7 @@ public class RestMultiSearchAction extends BaseRestHandler { parseMultiLineRequest(restRequest, multiRequest.indicesOptions(), allowExplicitIndex, (searchRequest, parser) -> { try { final QueryParseContext queryParseContext = new QueryParseContext(parser, parseFieldMatcher); - searchRequest.source(SearchSourceBuilder.fromXContent(queryParseContext, - searchRequestParsers.aggParsers, searchRequestParsers.suggesters)); + searchRequest.source(SearchSourceBuilder.fromXContent(queryParseContext, searchRequestParsers.suggesters)); multiRequest.add(searchRequest); } catch (IOException e) { throw new ElasticsearchParseException("Exception when parsing search request", e); diff --git a/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java b/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java index 07a423468fa..23ec7ef39c5 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java @@ -93,7 +93,7 @@ public class RestSearchAction extends BaseRestHandler { searchRequest.indices(Strings.splitStringByCommaToArray(request.param("index"))); if (requestContentParser != null) { QueryParseContext context = new QueryParseContext(requestContentParser, parseFieldMatcher); - searchRequest.source().parseXContent(context, searchRequestParsers.aggParsers, searchRequestParsers.suggesters); + searchRequest.source().parseXContent(context, searchRequestParsers.suggesters); } // do not allow 'query_and_fetch' or 'dfs_query_and_fetch' search types diff --git a/core/src/main/java/org/elasticsearch/search/SearchModule.java b/core/src/main/java/org/elasticsearch/search/SearchModule.java index c046d4bfe07..3817b6c507c 100644 --- a/core/src/main/java/org/elasticsearch/search/SearchModule.java +++ b/core/src/main/java/org/elasticsearch/search/SearchModule.java @@ -94,8 +94,8 @@ import org.elasticsearch.plugins.SearchPlugin.ScoreFunctionSpec; import org.elasticsearch.plugins.SearchPlugin.SearchExtSpec; import org.elasticsearch.plugins.SearchPlugin.SearchExtensionSpec; import org.elasticsearch.search.aggregations.AggregationBuilder; -import org.elasticsearch.search.aggregations.Aggregator; -import org.elasticsearch.search.aggregations.AggregatorParsers; +import org.elasticsearch.search.aggregations.AggregatorFactories; +import org.elasticsearch.search.aggregations.BaseAggregationBuilder; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.PipelineAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.children.ChildrenAggregationBuilder; @@ -268,10 +268,6 @@ public class SearchModule { private final boolean transportClient; private final Map highlighters; private final Map> suggesters; - private final ParseFieldRegistry aggregationParserRegistry = new ParseFieldRegistry<>("aggregation"); - private final ParseFieldRegistry pipelineAggregationParserRegistry = new ParseFieldRegistry<>( - "pipline_aggregation"); - private final AggregatorParsers aggregatorParsers = new 
AggregatorParsers(aggregationParserRegistry, pipelineAggregationParserRegistry); private final ParseFieldRegistry significanceHeuristicParserRegistry = new ParseFieldRegistry<>( "significance_heuristic"); private final ParseFieldRegistry movingAverageModelParserRegistry = new ParseFieldRegistry<>( @@ -301,7 +297,7 @@ public class SearchModule { registerFetchSubPhases(plugins); registerSearchExts(plugins); registerShapes(); - searchRequestParsers = new SearchRequestParsers(aggregatorParsers, getSuggesters()); + searchRequestParsers = new SearchRequestParsers(getSuggesters()); } public List getNamedWriteables() { @@ -341,13 +337,6 @@ public class SearchModule { return movingAverageModelParserRegistry; } - /** - * Parsers for {@link AggregationBuilder}s and {@link PipelineAggregationBuilder}s. - */ - public AggregatorParsers getAggregatorParsers() { - return aggregatorParsers; - } - private void registerAggregations(List plugins) { registerAggregation(new AggregationSpec(AvgAggregationBuilder.NAME, AvgAggregationBuilder::new, AvgAggregationBuilder::parse) .addResultReader(InternalAvg::new)); @@ -433,7 +422,10 @@ public class SearchModule { private void registerAggregation(AggregationSpec spec) { if (false == transportClient) { - aggregationParserRegistry.register(spec.getParser(), spec.getName()); + namedXContents.add(new NamedXContentRegistry.Entry(BaseAggregationBuilder.class, spec.getName(), (p, c) -> { + AggregatorFactories.AggParseContext context = (AggregatorFactories.AggParseContext) c; + return spec.getParser().parse(context.name, context.queryParseContext); + })); } namedWriteables.add( new NamedWriteableRegistry.Entry(AggregationBuilder.class, spec.getName().getPreferredName(), spec.getReader())); @@ -527,7 +519,10 @@ public class SearchModule { private void registerPipelineAggregation(PipelineAggregationSpec spec) { if (false == transportClient) { - pipelineAggregationParserRegistry.register(spec.getParser(), spec.getName()); + namedXContents.add(new NamedXContentRegistry.Entry(BaseAggregationBuilder.class, spec.getName(), (p, c) -> { + AggregatorFactories.AggParseContext context = (AggregatorFactories.AggParseContext) c; + return spec.getParser().parse(context.name, context.queryParseContext); + })); } namedWriteables.add( new NamedWriteableRegistry.Entry(PipelineAggregationBuilder.class, spec.getName().getPreferredName(), spec.getReader())); diff --git a/core/src/main/java/org/elasticsearch/search/SearchRequestParsers.java b/core/src/main/java/org/elasticsearch/search/SearchRequestParsers.java index 279caa91aca..cb3e82fe2cf 100644 --- a/core/src/main/java/org/elasticsearch/search/SearchRequestParsers.java +++ b/core/src/main/java/org/elasticsearch/search/SearchRequestParsers.java @@ -19,8 +19,6 @@ package org.elasticsearch.search; -import org.elasticsearch.index.query.QueryParseContext; -import org.elasticsearch.search.aggregations.AggregatorParsers; import org.elasticsearch.search.suggest.Suggesters; /** @@ -32,25 +30,14 @@ public class SearchRequestParsers { // methods split across RestSearchAction and SearchSourceBuilder should be moved here // TODO: make all members private once parsing functions are moved here - // TODO: AggregatorParsers should be removed and the underlying maps of agg - // and pipeline agg parsers should be here - /** - * Agg and pipeline agg parsers that may be used in search requests. 
- * @see org.elasticsearch.search.builder.SearchSourceBuilder#fromXContent(QueryParseContext, AggregatorParsers, - * Suggesters) - */ - public final AggregatorParsers aggParsers; - // TODO: Suggesters should be removed and the underlying map moved here /** * Suggesters that may be used in search requests. - * @see org.elasticsearch.search.builder.SearchSourceBuilder#fromXContent(QueryParseContext, AggregatorParsers, - * Suggesters) + * @see org.elasticsearch.search.builder.SearchSourceBuilder#fromXContent(QueryParseContext, Suggesters) */ public final Suggesters suggesters; - public SearchRequestParsers(AggregatorParsers aggParsers, Suggesters suggesters) { - this.aggParsers = aggParsers; + public SearchRequestParsers(Suggesters suggesters) { this.suggesters = suggesters; } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/AbstractAggregationBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/AbstractAggregationBuilder.java index 47e512325fe..b3ad015f689 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/AbstractAggregationBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/AbstractAggregationBuilder.java @@ -117,6 +117,7 @@ public abstract class AbstractAggregationBuilder build(SearchContext context, AggregatorFactory parent) throws IOException; /** Associate metadata with this {@link AggregationBuilder}. */ + @Override public abstract AggregationBuilder setMetaData(Map metaData); /** Add a sub aggregation to this builder. */ @@ -77,13 +79,14 @@ public abstract class AggregationBuilder /** * Internal: Registers sub-factories with this factory. The sub-factory will be * responsible for the creation of sub-aggregators under the aggregator - * created by this factory. This is only for use by {@link AggregatorParsers}. + * created by this factory. This is only for use by {@link AggregatorFactories#parseAggregators(QueryParseContext)}. 
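Note: with `AggregatorParsers` removed, callers now reach aggregation parsing through the static `AggregatorFactories.parseAggregators(QueryParseContext)` referenced in the javadoc above, and per-type dispatch goes through `XContentParser#namedObject` against the entries `SearchModule` registers. A minimal sketch of that end-to-end flow, modeled on the reworked `AggregatorFactoriesTests` later in this change; the wrapper class name is hypothetical and it assumes the `XContent#createParser(NamedXContentRegistry, String)` overload available on this branch:

```java
import static java.util.Collections.emptyList;

import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.search.aggregations.AggregatorFactories;

public class ParseAggregatorsSketch {
    public static AggregatorFactories.Builder parse(String aggsJson) throws Exception {
        // The registry now carries the agg and pipeline agg parsers that used to live in AggregatorParsers.
        NamedXContentRegistry registry = new NamedXContentRegistry(
                new SearchModule(Settings.EMPTY, false, emptyList()).getNamedXContents());
        try (XContentParser parser = JsonXContent.jsonXContent.createParser(registry, aggsJson)) {
            parser.nextToken(); // position on START_OBJECT, as the call sites in this patch do
            QueryParseContext context = new QueryParseContext(parser, ParseFieldMatcher.STRICT);
            // Dispatches each aggregation type through parser.namedObject(BaseAggregationBuilder.class, ...)
            return AggregatorFactories.parseAggregators(context);
        }
    }
}
```
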
* * @param subFactories * The sub-factories * @return this factory (fluent interface) */ - protected abstract AggregationBuilder subAggregations(AggregatorFactories.Builder subFactories); + @Override + public abstract AggregationBuilder subAggregations(AggregatorFactories.Builder subFactories); /** Common xcontent fields shared among aggregator builders */ public static final class CommonFields extends ParseField.CommonFields { diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/AggregatorFactories.java b/core/src/main/java/org/elasticsearch/search/aggregations/AggregatorFactories.java index b0f52ffb130..106335380cc 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/AggregatorFactories.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/AggregatorFactories.java @@ -19,10 +19,13 @@ package org.elasticsearch.search.aggregations; import org.elasticsearch.action.support.ToXContentToBytes; +import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.aggregations.support.AggregationPath; import org.elasticsearch.search.aggregations.support.AggregationPath.PathElement; @@ -40,8 +43,126 @@ import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; +import java.util.regex.Matcher; +import java.util.regex.Pattern; public class AggregatorFactories { + public static final Pattern VALID_AGG_NAME = Pattern.compile("[^\\[\\]>]+"); + + /** + * Parses the aggregation request recursively generating aggregator factories in turn. + * + * @param parseContext The parse context. + * + * @return The parsed aggregator factories. + * + * @throws IOException When parsing fails for unknown reasons. + */ + public static AggregatorFactories.Builder parseAggregators(QueryParseContext parseContext) throws IOException { + return parseAggregators(parseContext, 0); + } + + private static AggregatorFactories.Builder parseAggregators(QueryParseContext parseContext, int level) throws IOException { + Matcher validAggMatcher = VALID_AGG_NAME.matcher(""); + AggregatorFactories.Builder factories = new AggregatorFactories.Builder(); + + XContentParser.Token token = null; + XContentParser parser = parseContext.parser(); + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token != XContentParser.Token.FIELD_NAME) { + throw new ParsingException(parser.getTokenLocation(), + "Unexpected token " + token + " in [aggs]: aggregations definitions must start with the name of the aggregation."); + } + final String aggregationName = parser.currentName(); + if (!validAggMatcher.reset(aggregationName).matches()) { + throw new ParsingException(parser.getTokenLocation(), "Invalid aggregation name [" + aggregationName + + "]. 
"Aggregation names must be alpha-numeric and can only contain '_' and '-'");
+            }
+
+            token = parser.nextToken();
+            if (token != XContentParser.Token.START_OBJECT) {
+                throw new ParsingException(parser.getTokenLocation(), "Aggregation definition for [" + aggregationName + " starts with a ["
+                        + token + "], expected a [" + XContentParser.Token.START_OBJECT + "].");
+            }
+
+            BaseAggregationBuilder aggBuilder = null;
+            AggregatorFactories.Builder subFactories = null;
+
+            Map<String, Object> metaData = null;
+
+            while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
+                if (token != XContentParser.Token.FIELD_NAME) {
+                    throw new ParsingException(
+                            parser.getTokenLocation(), "Expected [" + XContentParser.Token.FIELD_NAME + "] under a ["
+                                    + XContentParser.Token.START_OBJECT + "], but got a [" + token + "] in [" + aggregationName + "]",
+                            parser.getTokenLocation());
+                }
+                final String fieldName = parser.currentName();
+
+                token = parser.nextToken();
+                if (token == XContentParser.Token.START_OBJECT) {
+                    switch (fieldName) {
+                    case "meta":
+                        metaData = parser.map();
+                        break;
+                    case "aggregations":
+                    case "aggs":
+                        if (subFactories != null) {
+                            throw new ParsingException(parser.getTokenLocation(),
+                                    "Found two sub aggregation definitions under [" + aggregationName + "]");
+                        }
+                        subFactories = parseAggregators(parseContext, level + 1);
+                        break;
+                    default:
+                        if (aggBuilder != null) {
+                            throw new ParsingException(parser.getTokenLocation(), "Found two aggregation type definitions in ["
+                                    + aggregationName + "]: [" + aggBuilder.getType() + "] and [" + fieldName + "]");
+                        }
+
+                        aggBuilder = parser.namedObject(BaseAggregationBuilder.class, fieldName,
+                                new AggParseContext(aggregationName, parseContext));
+                    }
+                } else {
+                    throw new ParsingException(parser.getTokenLocation(), "Expected [" + XContentParser.Token.START_OBJECT + "] under ["
+                            + fieldName + "], but got a [" + token + "] in [" + aggregationName + "]");
+                }
+            }
+
+            if (aggBuilder == null) {
+                throw new ParsingException(parser.getTokenLocation(), "Missing definition for aggregation [" + aggregationName + "]",
+                        parser.getTokenLocation());
+            } else {
+                if (metaData != null) {
+                    aggBuilder.setMetaData(metaData);
+                }
+
+                if (subFactories != null) {
+                    aggBuilder.subAggregations(subFactories);
+                }
+
+                if (aggBuilder instanceof AggregationBuilder) {
+                    factories.addAggregator((AggregationBuilder) aggBuilder);
+                } else {
+                    factories.addPipelineAggregator((PipelineAggregationBuilder) aggBuilder);
+                }
+            }
+        }
+
+        return factories;
+    }
+
+    /**
+     * Context to parse an aggregation. This should eventually be removed and replaced with a String.
+     */
+    public static final class AggParseContext {
+        public final String name;
+        public final QueryParseContext queryParseContext;
+
+        public AggParseContext(String name, QueryParseContext queryParseContext) {
+            this.name = name;
+            this.queryParseContext = queryParseContext;
+        }
+    }
 
     public static final AggregatorFactories EMPTY = new AggregatorFactories(null, new AggregatorFactory[0], new ArrayList());
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/BaseAggregationBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/BaseAggregationBuilder.java
new file mode 100644
index 00000000000..8f076cfe456
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/BaseAggregationBuilder.java
@@ -0,0 +1,46 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.search.aggregations;
+
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
+
+import java.util.Map;
+
+/**
+ * Interface shared by {@link AggregationBuilder} and {@link PipelineAggregationBuilder} so they can conveniently share the same namespace
+ * for {@link XContentParser#namedObject(Class, String, Object)}.
+ */
+public interface BaseAggregationBuilder {
+    /**
+     * The name of the type of aggregation built by this builder.
+     */
+    String getType();
+
+    /**
+     * Set the aggregation's metadata. Returns {@code this} for chaining.
+     */
+    BaseAggregationBuilder setMetaData(Map<String, Object> metaData);
+
+    /**
+     * Set the sub aggregations if this aggregation supports sub aggregations. Returns {@code this} for chaining.
+     */
+    BaseAggregationBuilder subAggregations(Builder subFactories);
+}
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/PipelineAggregationBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/PipelineAggregationBuilder.java
index f9462325471..8f965d1d87e 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/PipelineAggregationBuilder.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/PipelineAggregationBuilder.java
@@ -20,7 +20,7 @@ package org.elasticsearch.search.aggregations;
 
 import org.elasticsearch.action.support.ToXContentToBytes;
 import org.elasticsearch.common.io.stream.NamedWriteable;
-import org.elasticsearch.search.aggregations.AggregatorFactory;
+import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 
 import java.io.IOException;
@@ -32,7 +32,7 @@ import java.util.Map;
  * specific type.
  */
 public abstract class PipelineAggregationBuilder extends ToXContentToBytes
-        implements NamedWriteable {
+        implements NamedWriteable, BaseAggregationBuilder {
 
     protected final String name;
     protected final String[] bucketsPaths;
@@ -79,6 +79,11 @@ public abstract class PipelineAggregationBuilder extends ToXContentToBytes
     protected abstract PipelineAggregator create() throws IOException;
 
     /** Associate metadata with this {@link PipelineAggregationBuilder}.
*/ + @Override public abstract PipelineAggregationBuilder setMetaData(Map metaData); + @Override + public PipelineAggregationBuilder subAggregations(Builder subFactories) { + throw new IllegalArgumentException("Aggregation [" + name + "] cannot define sub-aggregations"); + } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/AbstractPipelineAggregationBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/AbstractPipelineAggregationBuilder.java index 4fb2ed91401..8d28195d551 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/AbstractPipelineAggregationBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/AbstractPipelineAggregationBuilder.java @@ -171,4 +171,8 @@ public abstract class AbstractPipelineAggregationBuilder new SearchModule(Settings.EMPTY, false, - singletonList(registersDupeAggregation))); + expectThrows(IllegalArgumentException.class, () -> new NamedXContentRegistry(new SearchModule(Settings.EMPTY, false, + singletonList(registersDupeAggregation)).getNamedXContents())); SearchPlugin registersDupePipelineAggregation = new SearchPlugin() { public List getPipelineAggregations() { @@ -162,8 +165,8 @@ public class SearchModuleTests extends ModuleTestCase { .addResultReader(InternalDerivative::new)); } }; - expectThrows(IllegalArgumentException.class, () -> new SearchModule(Settings.EMPTY, false, - singletonList(registersDupePipelineAggregation))); + expectThrows(IllegalArgumentException.class, () -> new NamedXContentRegistry(new SearchModule(Settings.EMPTY, false, + singletonList(registersDupePipelineAggregation)).getNamedXContents())); } public void testRegisterSuggester() { @@ -221,7 +224,11 @@ public class SearchModuleTests extends ModuleTestCase { } })); - assertNotNull(module.getAggregatorParsers().parser("test")); + assertThat( + module.getNamedXContents().stream() + .filter(entry -> entry.categoryClass.equals(BaseAggregationBuilder.class) && entry.name.match("test")) + .collect(toList()), + hasSize(1)); } public void testRegisterPipelineAggregation() { @@ -232,7 +239,11 @@ public class SearchModuleTests extends ModuleTestCase { } })); - assertNotNull(module.getAggregatorParsers().pipelineParser("test")); + assertThat( + module.getNamedXContents().stream() + .filter(entry -> entry.categoryClass.equals(BaseAggregationBuilder.class) && entry.name.match("test")) + .collect(toList()), + hasSize(1)); } private static final String[] NON_DEPRECATED_QUERIES = new String[] { diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/AggregationCollectorTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/AggregationCollectorTests.java index 0e79655d828..936442f16a7 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/AggregationCollectorTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/AggregationCollectorTests.java @@ -24,7 +24,6 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.query.QueryParseContext; -import org.elasticsearch.search.SearchRequestParsers; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.ESSingleNodeTestCase; @@ -59,12 +58,11 @@ public class AggregationCollectorTests extends ESSingleNodeTestCase { } private boolean needsScores(IndexService index, String agg) throws IOException { - AggregatorParsers parser 
= getInstanceFromNode(SearchRequestParsers.class).aggParsers; XContentParser aggParser = createParser(JsonXContent.jsonXContent, agg); QueryParseContext parseContext = new QueryParseContext(aggParser, ParseFieldMatcher.STRICT); aggParser.nextToken(); SearchContext context = createSearchContext(index); - final AggregatorFactories factories = parser.parseAggregators(parseContext).build(context, null); + final AggregatorFactories factories = AggregatorFactories.parseAggregators(parseContext).build(context, null); final Aggregator[] aggregators = factories.createTopLevelAggregators(); assertEquals(1, aggregators.length); return aggregators[0].needsScores(); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/AggregatorFactoriesTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/AggregatorFactoriesTests.java index 1822b1e22e9..2abb4dcebc1 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/AggregatorFactoriesTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/AggregatorFactoriesTests.java @@ -18,14 +18,56 @@ */ package org.elasticsearch.search.aggregations; +import org.elasticsearch.common.ParseFieldMatcher; +import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.XContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.json.JsonXContent; +import org.elasticsearch.env.Environment; +import org.elasticsearch.index.query.QueryParseContext; +import org.elasticsearch.script.ScriptService; +import org.elasticsearch.search.SearchModule; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders; +import org.elasticsearch.test.AbstractQueryTestCase; import org.elasticsearch.test.ESTestCase; import java.util.List; +import java.util.Random; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import static java.util.Collections.emptyList; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; public class AggregatorFactoriesTests extends ESTestCase { + private String[] currentTypes; + + private NamedXContentRegistry xContentRegistry; + protected ParseFieldMatcher parseFieldMatcher; + + @Override + public void setUp() throws Exception { + super.setUp(); + // we have to prefer CURRENT since with the range of versions we support + // it's rather unlikely to get the current actually. 
+ Settings settings = Settings.builder().put("node.name", AbstractQueryTestCase.class.toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) + .put(ScriptService.SCRIPT_AUTO_RELOAD_ENABLED_SETTING.getKey(), false).build(); + // create some random type with some default field, those types will + // stick around for all of the subclasses + currentTypes = new String[randomIntBetween(0, 5)]; + for (int i = 0; i < currentTypes.length; i++) { + String type = randomAsciiOfLengthBetween(1, 10); + currentTypes[i] = type; + } + xContentRegistry = new NamedXContentRegistry(new SearchModule(settings, false, emptyList()).getNamedXContents()); + parseFieldMatcher = ParseFieldMatcher.STRICT; + } + public void testGetAggregatorFactories_returnsUnmodifiableList() { AggregatorFactories.Builder builder = new AggregatorFactories.Builder().addAggregator(AggregationBuilders.avg("foo")); @@ -42,4 +84,175 @@ public class AggregatorFactoriesTests extends ESTestCase { expectThrows(UnsupportedOperationException.class, () -> pipelineAggregatorFactories.add(PipelineAggregatorBuilders.avgBucket("bar", "path2"))); } + + public void testTwoTypes() throws Exception { + XContentBuilder source = JsonXContent.contentBuilder() + .startObject() + .startObject("in_stock") + .startObject("filter") + .startObject("range") + .startObject("stock") + .field("gt", 0) + .endObject() + .endObject() + .endObject() + .startObject("terms") + .field("field", "stock") + .endObject() + .endObject() + .endObject(); + XContentParser parser = createParser(source); + QueryParseContext parseContext = new QueryParseContext(parser, parseFieldMatcher); + assertSame(XContentParser.Token.START_OBJECT, parser.nextToken()); + Exception e = expectThrows(ParsingException.class, () -> AggregatorFactories.parseAggregators(parseContext)); + assertThat(e.toString(), containsString("Found two aggregation type definitions in [in_stock]: [filter] and [terms]")); + } + + public void testTwoAggs() throws Exception { + assumeFalse("Test only makes sense if XContent parser doesn't have strict duplicate checks enabled", + XContent.isStrictDuplicateDetectionEnabled()); + XContentBuilder source = JsonXContent.contentBuilder() + .startObject() + .startObject("by_date") + .startObject("date_histogram") + .field("field", "timestamp") + .field("interval", "month") + .endObject() + .startObject("aggs") + .startObject("tag_count") + .startObject("cardinality") + .field("field", "tag") + .endObject() + .endObject() + .endObject() + .startObject("aggs") // 2nd "aggs": illegal + .startObject("tag_count2") + .startObject("cardinality") + .field("field", "tag") + .endObject() + .endObject() + .endObject() + .endObject() + .endObject(); + XContentParser parser = createParser(source); + QueryParseContext parseContext = new QueryParseContext(parser, parseFieldMatcher); + assertSame(XContentParser.Token.START_OBJECT, parser.nextToken()); + Exception e = expectThrows(ParsingException.class, () -> AggregatorFactories.parseAggregators(parseContext)); + assertThat(e.toString(), containsString("Found two sub aggregation definitions under [by_date]")); + } + + public void testInvalidAggregationName() throws Exception { + Matcher matcher = Pattern.compile("[^\\[\\]>]+").matcher(""); + String name; + Random rand = random(); + int len = randomIntBetween(1, 5); + char[] word = new char[len]; + while (true) { + for (int i = 0; i < word.length; i++) { + word[i] = (char) rand.nextInt(127); + } + name = String.valueOf(word); + if (!matcher.reset(name).matches()) { + break; + } 
+ } + + XContentBuilder source = JsonXContent.contentBuilder() + .startObject() + .startObject(name) + .startObject("filter") + .startObject("range") + .startObject("stock") + .field("gt", 0) + .endObject() + .endObject() + .endObject() + .endObject() + .endObject(); + XContentParser parser = createParser(source); + QueryParseContext parseContext = new QueryParseContext(parser, parseFieldMatcher); + assertSame(XContentParser.Token.START_OBJECT, parser.nextToken()); + Exception e = expectThrows(ParsingException.class, () -> AggregatorFactories.parseAggregators(parseContext)); + assertThat(e.toString(), containsString("Invalid aggregation name [" + name + "]")); + } + + public void testSameAggregationName() throws Exception { + assumeFalse("Test only makes sense if XContent parser doesn't have strict duplicate checks enabled", + XContent.isStrictDuplicateDetectionEnabled()); + final String name = randomAsciiOfLengthBetween(1, 10); + XContentBuilder source = JsonXContent.contentBuilder() + .startObject() + .startObject(name) + .startObject("terms") + .field("field", "a") + .endObject() + .endObject() + .startObject(name) + .startObject("terms") + .field("field", "b") + .endObject() + .endObject() + .endObject(); + XContentParser parser = createParser(source); + QueryParseContext parseContext = new QueryParseContext(parser, parseFieldMatcher); + assertSame(XContentParser.Token.START_OBJECT, parser.nextToken()); + Exception e = expectThrows(ParsingException.class, () -> AggregatorFactories.parseAggregators(parseContext)); + assertThat(e.toString(), containsString("Two sibling aggregations cannot have the same name: [" + name + "]")); + } + + public void testMissingName() throws Exception { + XContentBuilder source = JsonXContent.contentBuilder() + .startObject() + .startObject("by_date") + .startObject("date_histogram") + .field("field", "timestamp") + .field("interval", "month") + .endObject() + .startObject("aggs") + // the aggregation name is missing + //.startObject("tag_count") + .startObject("cardinality") + .field("field", "tag") + .endObject() + //.endObject() + .endObject() + .endObject() + .endObject(); + XContentParser parser = createParser(source); + QueryParseContext parseContext = new QueryParseContext(parser, parseFieldMatcher); + assertSame(XContentParser.Token.START_OBJECT, parser.nextToken()); + Exception e = expectThrows(ParsingException.class, () -> AggregatorFactories.parseAggregators(parseContext)); + assertThat(e.toString(), containsString("Expected [START_OBJECT] under [field], but got a [VALUE_STRING] in [cardinality]")); + } + + public void testMissingType() throws Exception { + XContentBuilder source = JsonXContent.contentBuilder() + .startObject() + .startObject("by_date") + .startObject("date_histogram") + .field("field", "timestamp") + .field("interval", "month") + .endObject() + .startObject("aggs") + .startObject("tag_count") + // the aggregation type is missing + //.startObject("cardinality") + .field("field", "tag") + //.endObject() + .endObject() + .endObject() + .endObject() + .endObject(); + XContentParser parser = createParser(source); + QueryParseContext parseContext = new QueryParseContext(parser, parseFieldMatcher); + assertSame(XContentParser.Token.START_OBJECT, parser.nextToken()); + Exception e = expectThrows(ParsingException.class, () -> AggregatorFactories.parseAggregators(parseContext)); + assertThat(e.toString(), containsString("Expected [START_OBJECT] under [field], but got a [VALUE_STRING] in [tag_count]")); + } + + @Override + protected 
NamedXContentRegistry xContentRegistry() { + return xContentRegistry; + } + } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/AggregatorParsingTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/AggregatorParsingTests.java deleted file mode 100644 index 5d6222e6979..00000000000 --- a/core/src/test/java/org/elasticsearch/search/aggregations/AggregatorParsingTests.java +++ /dev/null @@ -1,273 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.search.aggregations; - -import org.elasticsearch.common.ParseFieldMatcher; -import org.elasticsearch.common.ParsingException; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.NamedXContentRegistry; -import org.elasticsearch.common.xcontent.XContent; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.xcontent.json.JsonXContent; -import org.elasticsearch.env.Environment; -import org.elasticsearch.index.query.QueryParseContext; -import org.elasticsearch.script.ScriptService; -import org.elasticsearch.search.SearchModule; -import org.elasticsearch.test.AbstractQueryTestCase; -import org.elasticsearch.test.ESTestCase; - -import java.util.Random; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -import static java.util.Collections.emptyList; -import static org.hamcrest.Matchers.containsString; - -public class AggregatorParsingTests extends ESTestCase { - - private String[] currentTypes; - - protected String[] getCurrentTypes() { - return currentTypes; - } - - protected AggregatorParsers aggParsers; - private NamedXContentRegistry xContentRegistry; - protected ParseFieldMatcher parseFieldMatcher; - - /** - * Setup for the whole base test class. - */ - @Override - public void setUp() throws Exception { - super.setUp(); - // we have to prefer CURRENT since with the range of versions we support - // it's rather unlikely to get the current actually. 
- Settings settings = Settings.builder().put("node.name", AbstractQueryTestCase.class.toString()) - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) - .put(ScriptService.SCRIPT_AUTO_RELOAD_ENABLED_SETTING.getKey(), false).build(); - SearchModule searchModule = new SearchModule(settings, false, emptyList()); - aggParsers = searchModule.getSearchRequestParsers().aggParsers; - // create some random type with some default field, those types will - // stick around for all of the subclasses - currentTypes = new String[randomIntBetween(0, 5)]; - for (int i = 0; i < currentTypes.length; i++) { - String type = randomAsciiOfLengthBetween(1, 10); - currentTypes[i] = type; - } - xContentRegistry = new NamedXContentRegistry(searchModule.getNamedXContents()); - parseFieldMatcher = ParseFieldMatcher.STRICT; - } - - public void testTwoTypes() throws Exception { - XContentBuilder source = JsonXContent.contentBuilder() - .startObject() - .startObject("in_stock") - .startObject("filter") - .startObject("range") - .startObject("stock") - .field("gt", 0) - .endObject() - .endObject() - .endObject() - .startObject("terms") - .field("field", "stock") - .endObject() - .endObject() - .endObject(); - try { - XContentParser parser = createParser(source); - QueryParseContext parseContext = new QueryParseContext(parser, parseFieldMatcher); - assertSame(XContentParser.Token.START_OBJECT, parser.nextToken()); - aggParsers.parseAggregators(parseContext); - fail(); - } catch (ParsingException e) { - assertThat(e.toString(), containsString("Found two aggregation type definitions in [in_stock]: [filter] and [terms]")); - } - } - - public void testTwoAggs() throws Exception { - assumeFalse("Test only makes sense if XContent parser doesn't have strict duplicate checks enabled", - XContent.isStrictDuplicateDetectionEnabled()); - XContentBuilder source = JsonXContent.contentBuilder() - .startObject() - .startObject("by_date") - .startObject("date_histogram") - .field("field", "timestamp") - .field("interval", "month") - .endObject() - .startObject("aggs") - .startObject("tag_count") - .startObject("cardinality") - .field("field", "tag") - .endObject() - .endObject() - .endObject() - .startObject("aggs") // 2nd "aggs": illegal - .startObject("tag_count2") - .startObject("cardinality") - .field("field", "tag") - .endObject() - .endObject() - .endObject() - .endObject() - .endObject(); - try { - XContentParser parser = createParser(source); - QueryParseContext parseContext = new QueryParseContext(parser, parseFieldMatcher); - assertSame(XContentParser.Token.START_OBJECT, parser.nextToken()); - aggParsers.parseAggregators(parseContext); - fail(); - } catch (ParsingException e) { - assertThat(e.toString(), containsString("Found two sub aggregation definitions under [by_date]")); - } - } - - public void testInvalidAggregationName() throws Exception { - Matcher matcher = Pattern.compile("[^\\[\\]>]+").matcher(""); - String name; - Random rand = random(); - int len = randomIntBetween(1, 5); - char[] word = new char[len]; - while (true) { - for (int i = 0; i < word.length; i++) { - word[i] = (char) rand.nextInt(127); - } - name = String.valueOf(word); - if (!matcher.reset(name).matches()) { - break; - } - } - - XContentBuilder source = JsonXContent.contentBuilder() - .startObject() - .startObject(name) - .startObject("filter") - .startObject("range") - .startObject("stock") - .field("gt", 0) - .endObject() - .endObject() - .endObject() - .endObject() - .endObject(); - try { - XContentParser parser = createParser(source); - 
QueryParseContext parseContext = new QueryParseContext(parser, parseFieldMatcher); - assertSame(XContentParser.Token.START_OBJECT, parser.nextToken()); - aggParsers.parseAggregators(parseContext); - fail(); - } catch (ParsingException e) { - assertThat(e.toString(), containsString("Invalid aggregation name [" + name + "]")); - } - } - - public void testSameAggregationName() throws Exception { - assumeFalse("Test only makes sense if XContent parser doesn't have strict duplicate checks enabled", - XContent.isStrictDuplicateDetectionEnabled()); - final String name = randomAsciiOfLengthBetween(1, 10); - XContentBuilder source = JsonXContent.contentBuilder() - .startObject() - .startObject(name) - .startObject("terms") - .field("field", "a") - .endObject() - .endObject() - .startObject(name) - .startObject("terms") - .field("field", "b") - .endObject() - .endObject() - .endObject(); - try { - XContentParser parser = createParser(source); - QueryParseContext parseContext = new QueryParseContext(parser, parseFieldMatcher); - assertSame(XContentParser.Token.START_OBJECT, parser.nextToken()); - aggParsers.parseAggregators(parseContext); - fail(); - } catch (IllegalArgumentException e) { - assertThat(e.toString(), containsString("Two sibling aggregations cannot have the same name: [" + name + "]")); - } - } - - public void testMissingName() throws Exception { - XContentBuilder source = JsonXContent.contentBuilder() - .startObject() - .startObject("by_date") - .startObject("date_histogram") - .field("field", "timestamp") - .field("interval", "month") - .endObject() - .startObject("aggs") - // the aggregation name is missing - //.startObject("tag_count") - .startObject("cardinality") - .field("field", "tag") - .endObject() - //.endObject() - .endObject() - .endObject() - .endObject(); - try { - XContentParser parser = createParser(source); - QueryParseContext parseContext = new QueryParseContext(parser, parseFieldMatcher); - assertSame(XContentParser.Token.START_OBJECT, parser.nextToken()); - aggParsers.parseAggregators(parseContext); - fail(); - } catch (ParsingException e) { - // All Good - } - } - - public void testMissingType() throws Exception { - XContentBuilder source = JsonXContent.contentBuilder() - .startObject() - .startObject("by_date") - .startObject("date_histogram") - .field("field", "timestamp") - .field("interval", "month") - .endObject() - .startObject("aggs") - .startObject("tag_count") - // the aggregation type is missing - //.startObject("cardinality") - .field("field", "tag") - //.endObject() - .endObject() - .endObject() - .endObject() - .endObject(); - try { - XContentParser parser = createParser(source); - QueryParseContext parseContext = new QueryParseContext(parser, parseFieldMatcher); - assertSame(XContentParser.Token.START_OBJECT, parser.nextToken()); - aggParsers.parseAggregators(parseContext); - fail(); - } catch (ParsingException e) { - // All Good - } - } - - @Override - protected NamedXContentRegistry xContentRegistry() { - return xContentRegistry; - } -} diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/BaseAggregationTestCase.java b/core/src/test/java/org/elasticsearch/search/aggregations/BaseAggregationTestCase.java index 3b4b85fe1fb..2d4b80932b0 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/BaseAggregationTestCase.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/BaseAggregationTestCase.java @@ -45,6 +45,7 @@ import java.util.List; import static java.util.Collections.emptyList; import static 
org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode; +import static org.hamcrest.Matchers.hasSize; public abstract class BaseAggregationTestCase> extends ESTestCase { @@ -62,8 +63,6 @@ public abstract class BaseAggregationTestCase> extends ESTestCase { @@ -62,8 +64,8 @@ public abstract class BasePipelineAggregationTestCase AggregatorFactories.parseAggregators(parseContext)); + assertThat(e.toString(), containsString("Aggregator [top_tags_hits] of type [top_hits] cannot accept sub-aggregations")); } } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/ExtendedStatsBucketTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/ExtendedStatsBucketTests.java index 486528627d1..b77c0bdd972 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/ExtendedStatsBucketTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/ExtendedStatsBucketTests.java @@ -19,10 +19,8 @@ package org.elasticsearch.search.aggregations.pipeline.bucketmetrics; +import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.xcontent.json.JsonXContent; -import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.extended.ExtendedStatsBucketPipelineAggregationBuilder; import static org.hamcrest.Matchers.equalTo; @@ -39,20 +37,18 @@ public class ExtendedStatsBucketTests extends AbstractBucketMetricsTestCase { @Override @@ -103,25 +99,11 @@ public class MovAvgTests extends BasePipelineAggregationTestCase SearchSourceBuilder.fromXContent(createParseContext(parser), - searchRequestParsers.aggParsers, searchRequestParsers.suggesters)); + searchRequestParsers.suggesters)); assertEquals("[multi_match] malformed query, expected [END_OBJECT] but found [FIELD_NAME]", e.getMessage()); } } @@ -156,7 +155,7 @@ public class SearchSourceBuilderTests extends AbstractSearchTestCase { String restContent = " { \"sort\": \"foo\"}"; try (XContentParser parser = createParser(JsonXContent.jsonXContent, restContent)) { SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.fromXContent(createParseContext(parser), - searchRequestParsers.aggParsers, searchRequestParsers.suggesters); + searchRequestParsers.suggesters); assertEquals(1, searchSourceBuilder.sorts().size()); assertEquals(new FieldSortBuilder("foo"), searchSourceBuilder.sorts().get(0)); } @@ -172,7 +171,7 @@ public class SearchSourceBuilderTests extends AbstractSearchTestCase { " ]}"; try (XContentParser parser = createParser(JsonXContent.jsonXContent, restContent)) { SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.fromXContent(createParseContext(parser), - searchRequestParsers.aggParsers, searchRequestParsers.suggesters); + searchRequestParsers.suggesters); assertEquals(5, searchSourceBuilder.sorts().size()); assertEquals(new FieldSortBuilder("post_date"), searchSourceBuilder.sorts().get(0)); assertEquals(new FieldSortBuilder("user"), searchSourceBuilder.sorts().get(1)); @@ -196,7 +195,7 @@ public class SearchSourceBuilderTests extends AbstractSearchTestCase { "}\n"; try (XContentParser parser = createParser(JsonXContent.jsonXContent, restContent)) { SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.fromXContent(createParseContext(parser), - searchRequestParsers.aggParsers, 
searchRequestParsers.suggesters); + searchRequestParsers.suggesters); assertEquals(1, searchSourceBuilder.aggregations().count()); } } @@ -212,7 +211,7 @@ public class SearchSourceBuilderTests extends AbstractSearchTestCase { "}\n"; try (XContentParser parser = createParser(JsonXContent.jsonXContent, restContent)) { SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.fromXContent(createParseContext(parser), - searchRequestParsers.aggParsers, searchRequestParsers.suggesters); + searchRequestParsers.suggesters); assertEquals(1, searchSourceBuilder.aggregations().count()); } } @@ -238,7 +237,7 @@ public class SearchSourceBuilderTests extends AbstractSearchTestCase { "}\n"; try (XContentParser parser = createParser(JsonXContent.jsonXContent, restContent)) { SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.fromXContent(createParseContext(parser), - searchRequestParsers.aggParsers, searchRequestParsers.suggesters); + searchRequestParsers.suggesters); assertEquals(1, searchSourceBuilder.rescores().size()); assertEquals(new QueryRescorerBuilder(QueryBuilders.matchQuery("content", "baz")).windowSize(50), searchSourceBuilder.rescores().get(0)); @@ -261,7 +260,7 @@ public class SearchSourceBuilderTests extends AbstractSearchTestCase { "}\n"; try (XContentParser parser = createParser(JsonXContent.jsonXContent, restContent)) { SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.fromXContent(createParseContext(parser), - searchRequestParsers.aggParsers, searchRequestParsers.suggesters); + searchRequestParsers.suggesters); assertEquals(1, searchSourceBuilder.rescores().size()); assertEquals(new QueryRescorerBuilder(QueryBuilders.matchQuery("content", "baz")).windowSize(50), searchSourceBuilder.rescores().get(0)); @@ -274,7 +273,7 @@ public class SearchSourceBuilderTests extends AbstractSearchTestCase { final String query = "{ \"query\": { \"match_all\": {}}, \"timeout\": \"" + timeout + "\"}"; try (XContentParser parser = createParser(JsonXContent.jsonXContent, query)) { final SearchSourceBuilder builder = SearchSourceBuilder.fromXContent(createParseContext(parser), - searchRequestParsers.aggParsers, searchRequestParsers.suggesters); + searchRequestParsers.suggesters); assertThat(builder.timeout(), equalTo(TimeValue.parseTimeValue(timeout, null, "timeout"))); } } @@ -283,11 +282,8 @@ public class SearchSourceBuilderTests extends AbstractSearchTestCase { final int timeout = randomIntBetween(1, 1024); final String query = "{ \"query\": { \"match_all\": {}}, \"timeout\": \"" + timeout + "\"}"; try (XContentParser parser = createParser(JsonXContent.jsonXContent, query)) { - final ElasticsearchParseException e = - expectThrows( - ElasticsearchParseException.class, - () -> SearchSourceBuilder.fromXContent(createParseContext(parser), - searchRequestParsers.aggParsers, searchRequestParsers.suggesters)); + final ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> SearchSourceBuilder.fromXContent( + createParseContext(parser), searchRequestParsers.suggesters)); assertThat(e, hasToString(containsString("unit is missing or unrecognized"))); } } @@ -320,7 +316,7 @@ public class SearchSourceBuilderTests extends AbstractSearchTestCase { String restContent = " { \"indices_boost\": {\"foo\": 1.0, \"bar\": 2.0}}"; try (XContentParser parser = createParser(JsonXContent.jsonXContent, restContent)) { SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.fromXContent(createParseContext(parser), - searchRequestParsers.aggParsers, 
searchRequestParsers.suggesters); + searchRequestParsers.suggesters); assertEquals(2, searchSourceBuilder.indexBoosts().size()); assertEquals(new SearchSourceBuilder.IndexBoost("foo", 1.0f), searchSourceBuilder.indexBoosts().get(0)); assertEquals(new SearchSourceBuilder.IndexBoost("bar", 2.0f), searchSourceBuilder.indexBoosts().get(1)); @@ -337,7 +333,7 @@ public class SearchSourceBuilderTests extends AbstractSearchTestCase { " ]}"; try (XContentParser parser = createParser(JsonXContent.jsonXContent, restContent)) { SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.fromXContent(createParseContext(parser), - searchRequestParsers.aggParsers, searchRequestParsers.suggesters); + searchRequestParsers.suggesters); assertEquals(3, searchSourceBuilder.indexBoosts().size()); assertEquals(new SearchSourceBuilder.IndexBoost("foo", 1.0f), searchSourceBuilder.indexBoosts().get(0)); assertEquals(new SearchSourceBuilder.IndexBoost("bar", 2.0f), searchSourceBuilder.indexBoosts().get(1)); @@ -385,7 +381,7 @@ public class SearchSourceBuilderTests extends AbstractSearchTestCase { private void assertIndicesBoostParseErrorMessage(String restContent, String expectedErrorMessage) throws IOException { try (XContentParser parser = createParser(JsonXContent.jsonXContent, restContent)) { ParsingException e = expectThrows(ParsingException.class, () -> SearchSourceBuilder.fromXContent(createParseContext(parser), - searchRequestParsers.aggParsers, searchRequestParsers.suggesters)); + searchRequestParsers.suggesters)); assertEquals(expectedErrorMessage, e.getMessage()); } } diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportSearchTemplateAction.java index 68223583175..2fb5a989755 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportSearchTemplateAction.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportSearchTemplateAction.java @@ -88,8 +88,7 @@ public class TransportSearchTemplateAction extends HandledTransportAction listener) { SearchRequest searchRequest; try { - searchRequest = createSearchRequest(request, docSource, - searchRequestParsers.aggParsers, xContentRegistry, parseFieldMatcher); + searchRequest = createSearchRequest(request, docSource, xContentRegistry, parseFieldMatcher); } catch (IOException e) { listener.onFailure(e); return; @@ -127,7 +125,6 @@ public class TransportPercolateAction extends HandledTransportAction
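
Note: nothing changes in the `SearchPlugin` contract itself; an `AggregationSpec` still supplies an `Aggregator.Parser`, and `SearchModule.registerAggregation` adapts it into a `NamedXContentRegistry.Entry` keyed on `BaseAggregationBuilder`. A hedged sketch of that adapter, with a hypothetical `myParser` standing in for a plugin-provided parser; the entry shape copies the lambda added to `SearchModule` above rather than introducing any new API:

```java
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.BaseAggregationBuilder;

public class AggXContentEntrySketch {
    /**
     * Adapts an Aggregator.Parser (the parse(name, context) contract plugins already implement) into the
     * named xcontent entry that parser.namedObject(BaseAggregationBuilder.class, ...) resolves at parse time.
     */
    static NamedXContentRegistry.Entry entryFor(ParseField name, Aggregator.Parser myParser) {
        return new NamedXContentRegistry.Entry(BaseAggregationBuilder.class, name, (p, c) -> {
            // The context carries the aggregation's name plus the QueryParseContext, via AggParseContext.
            AggregatorFactories.AggParseContext context = (AggregatorFactories.AggParseContext) c;
            return myParser.parse(context.name, context.queryParseContext);
        });
    }
}
```
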